void FORTE_BL_PWM::executeEvent(int pa_nEIID){
  bool bRet = true;
  switch(pa_nEIID){
    case scm_nEventINITID:
      if(m_poPWM != NULL){
        //Fix output before stopping the PWM
        //TODO: Handle the PWM output level when stopping it
        m_poPWM->setRunState(BlackLib::stop);
        delete m_poPWM;
        m_poPWM = NULL;
      }
      if(QI() == true){
        char * pacBuffer = new char[PARAMS().length() + 3];
        PARAMS().toString(pacBuffer, sizeof(char) * (PARAMS().length() + 3));
        if (strncmp(pacBuffer, "\'EHRPWM2B\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM2B);
        } else if (strncmp(pacBuffer, "\'EHRPWM2A\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM2A);
        } else if (strncmp(pacBuffer, "\'EHRPWM1A\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM1A);
        } else if (strncmp(pacBuffer, "\'EHRPWM1B\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM1B);
        } else if (strncmp(pacBuffer, "\'EHRPWM0B\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM0B);
        } else if (strncmp(pacBuffer, "\'EHRPWM0A\'", 10) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::EHRPWM0A);
        } else if (strncmp(pacBuffer, "\'ECAP0\'", 7) == 0) {
          m_poPWM = new BlackLib::BlackPWM(BlackLib::ECAP0);
        } else {
          delete[] pacBuffer;
          DEVLOG_ERROR("Invalid input PARAM in BL_PWM FB\n");
          STATUS() = "Input PARAM error";
          QO() = false;
          sendOutputEvent(scm_nEventINITOID);
          break;
        }
        delete[] pacBuffer;
        bRet = m_poPWM->setRunState(BlackLib::stop);
        bRet = bRet && m_poPWM->setDutyPercent(0.0);
        bRet = bRet && m_poPWM->setPeriodTime(Period().getInMicroSeconds(), BlackLib::microsecond);
        bRet = bRet && m_poPWM->setDutyPercent(Duty());
        if(Polarity() == true){
          bRet = bRet && m_poPWM->setPolarity(BlackLib::straight);
        }else{
          bRet = bRet && m_poPWM->setPolarity(BlackLib::reverse);
        }
        bRet = bRet && m_poPWM->setRunState(BlackLib::run);
        if(bRet == true){
          //PWM configuration OK
          STATUS() = "OK";
          QO() = true;
        }else{
          //PWM configuration error
          DEVLOG_DEBUG("Invalid configuration parameters in BL_PWM FB\n");
          STATUS() = "Configuration Error";
          QO() = false;
        }
      }else{
        STATUS() = "INIT- OK";
        QO() = false;
      }
      sendOutputEvent(scm_nEventINITOID);
      break;

    case scm_nEventUpdateID:
      if (m_poPWM == NULL){
        //PWM must be initialized before updating configuration
        DEVLOG_ERROR("Trying to update before initializing PWM\n");
        STATUS() = "ERROR updating PWM";
        QO() = false;
        sendOutputEvent(scm_nEventCNFID);
        break;
      }
      bRet = m_poPWM->setDutyPercent(0.0);
      bRet = bRet && m_poPWM->setPeriodTime(Period().getInMicroSeconds(), BlackLib::microsecond);
      bRet = bRet && m_poPWM->setDutyPercent(Duty());
      if(Polarity() == false){
        //BlackLib polarity seems to be inverse; for us straight is HIGH->LOW
        bRet = bRet && m_poPWM->setPolarity(BlackLib::straight);
      }else{
        bRet = bRet && m_poPWM->setPolarity(BlackLib::reverse);
      }
      if(bRet == true){
        //PWM configuration OK
        STATUS() = "OK";
        QO() = true;
      }else{
        //PWM configuration error
        DEVLOG_DEBUG("Invalid configuration parameters in BL_PWM FB\n");
        STATUS() = "Configuration Error";
        QO() = false;
      }
      sendOutputEvent(scm_nEventCNFID);
      break;

    case scm_nEventStartID:
      if(m_poPWM != NULL){
        if(m_poPWM->setRunState(BlackLib::run)){
          QI() = true;
          STATUS() = "Running";
        }else{
          //Could not start PWM
          DEVLOG_ERROR("Unable to run PWM\n");
          STATUS() = "ERROR running PWM";
          QO() = false;
        }
      }else{
        QI() = false;
        STATUS() = "PWM not initialized";
      }
      sendOutputEvent(scm_nEventCNFID);
      break;

    case scm_nEventStopID:
      if (m_poPWM != NULL){
        //TODO: Handle the PWM output level when stopping it
        if(m_poPWM->setRunState(BlackLib::stop)){
          STATUS() = "PWM Stopped";
          QO() = true;
        }else{
          //Could not stop PWM
          DEVLOG_ERROR("Unable to stop PWM\n");
          STATUS() = "ERROR stopping PWM";
          QO() = false;
        }
      }else{
        QI() = false;
        STATUS() = "PWM not initialized";
      }
      sendOutputEvent(scm_nEventCNFID);
      break;
  }
}
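// A minimal, hypothetical sketch (not part of the FB above) of the same BlackLib call
// sequence the INIT handler uses: stop, zero the duty cycle, set period and duty, choose
// the polarity, then run. The pin, period and duty values are illustrative only, and the
// BlackLib headers are assumed to be on the include path.
bool configurePwmSketch() {
  BlackLib::BlackPWM pwm(BlackLib::EHRPWM2B);                  // one of the pins the FB accepts via PARAMS
  bool ok = pwm.setRunState(BlackLib::stop);                   // stop before reconfiguring
  ok = ok && pwm.setDutyPercent(0.0);                          // zero duty so the period change is safe
  ok = ok && pwm.setPeriodTime(20000, BlackLib::microsecond);  // 20 ms period (illustrative)
  ok = ok && pwm.setDutyPercent(50.0);                         // 50% duty (illustrative)
  ok = ok && pwm.setPolarity(BlackLib::straight);
  ok = ok && pwm.setRunState(BlackLib::run);
  return ok;                                                   // false signals a configuration error
}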
MakeOIS& MakeOIS::withTerminationDate(const Date& terminationDate) {
    terminationDate_ = terminationDate;
    swapTenor_ = Period();
    return *this;
}
ThresholdState AbstractThresholdConditionChecker::GetStateFor(const CBlockIndex* pindexPrev, const Consensus::Params& params, ThresholdConditionCache& cache) const {
    int nPeriod = Period(params);
    int nThreshold = Threshold(params);
    int64_t nTimeStart = BeginTime(params);
    int64_t nTimeTimeout = EndTime(params);

    // Check if this deployment is always active.
    if (nTimeStart == Consensus::BIP9Deployment::ALWAYS_ACTIVE) {
        return THRESHOLD_ACTIVE;
    }

    // A block's state is always the same as that of the first of its period, so it is computed
    // based on a pindexPrev whose height equals a multiple of nPeriod - 1.
    if (pindexPrev != nullptr) {
        pindexPrev = pindexPrev->GetAncestor(pindexPrev->nHeight - ((pindexPrev->nHeight + 1) % nPeriod));
    }

    // Walk backwards in steps of nPeriod to find a pindexPrev whose information is known
    std::vector<const CBlockIndex*> vToCompute;
    while (cache.count(pindexPrev) == 0) {
        if (pindexPrev == nullptr) {
            // The genesis block is by definition defined.
            cache[pindexPrev] = THRESHOLD_DEFINED;
            break;
        }
        if (pindexPrev->GetMedianTimePast() < nTimeStart) {
            // Optimization: don't recompute down further, as we know every earlier block will be before the start time
            cache[pindexPrev] = THRESHOLD_DEFINED;
            break;
        }
        vToCompute.push_back(pindexPrev);
        pindexPrev = pindexPrev->GetAncestor(pindexPrev->nHeight - nPeriod);
    }

    // At this point, cache[pindexPrev] is known
    assert(cache.count(pindexPrev));
    ThresholdState state = cache[pindexPrev];

    // Now walk forward and compute the state of descendants of pindexPrev
    while (!vToCompute.empty()) {
        ThresholdState stateNext = state;
        pindexPrev = vToCompute.back();
        vToCompute.pop_back();

        switch (state) {
            case THRESHOLD_DEFINED: {
                if (pindexPrev->GetMedianTimePast() >= nTimeTimeout) {
                    stateNext = THRESHOLD_FAILED;
                } else if (pindexPrev->GetMedianTimePast() >= nTimeStart) {
                    stateNext = THRESHOLD_STARTED;
                }
                break;
            }
            case THRESHOLD_STARTED: {
                if (pindexPrev->GetMedianTimePast() >= nTimeTimeout) {
                    stateNext = THRESHOLD_FAILED;
                    break;
                }
                // We need to count
                const CBlockIndex* pindexCount = pindexPrev;
                int count = 0;
                for (int i = 0; i < nPeriod; i++) {
                    if (Condition(pindexCount, params)) {
                        count++;
                    }
                    pindexCount = pindexCount->pprev;
                }
                if (count >= nThreshold) {
                    stateNext = THRESHOLD_LOCKED_IN;
                }
                break;
            }
            case THRESHOLD_LOCKED_IN: {
                // Always progresses into ACTIVE.
                stateNext = THRESHOLD_ACTIVE;
                break;
            }
            case THRESHOLD_FAILED:
            case THRESHOLD_ACTIVE: {
                // Nothing happens, these are terminal states.
                break;
            }
        }
        cache[pindexPrev] = state = stateNext;
    }

    return state;
}
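// A small standalone sketch (hypothetical helper, not part of the checker above) of the
// period alignment used at the top of GetStateFor: each block is mapped to the ancestor
// whose height equals a multiple of nPeriod minus one, so every block of a period shares
// the same threshold state. The nPeriod value below is illustrative.
#include <cassert>

static int AlignToPeriodSketch(int nHeight, int nPeriod) {
    return nHeight - ((nHeight + 1) % nPeriod);   // last block of the previous period
}

static void alignToPeriodChecks() {
    const int nPeriod = 2016;
    assert(AlignToPeriodSketch(2015, nPeriod) == 2015);   // already the last block of its period
    assert(AlignToPeriodSketch(2016, nPeriod) == 2015);   // first block of the next period maps back
    assert(AlignToPeriodSketch(4031, nPeriod) == 4031);
    assert(AlignToPeriodSketch(4032, nPeriod) == 4031);
}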
MakeVanillaSwap& MakeVanillaSwap::withTerminationDate(const Date& terminationDate) {
    terminationDate_ = terminationDate;
    swapTenor_ = Period();
    return *this;
}
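// A minimal usage sketch (illustrative values; QuantLib's MakeVanillaSwap is assumed to be
// available): passing an explicit termination date overrides the tenor-based end date,
// which is why withTerminationDate() above resets swapTenor_ to an empty Period(). The
// sketch assumes the index carries a forwarding curve usable by the default swap engine.
VanillaSwap makeSwapSketch(const boost::shared_ptr<IborIndex>& index) {
    Date end(15, January, 2030);                           // hypothetical termination date
    return MakeVanillaSwap(Period(5, Years), index, 0.03)  // the 5Y tenor is ignored once...
           .withTerminationDate(end);                      // ...an explicit end date is given
}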
bool CREAMClient::createDelegation(const std::string& delegation_id, const std::string& proxy) {
    logger.msg(VERBOSE, "Creating delegation");

    action = "getProxyReq";
    PayloadSOAP req(cream_ns);
    req.NewChild("deleg:" + action).NewChild("delegationID") = delegation_id;

    XMLNode response;
    if (!process(req, response, "http://www.gridsite.org/namespaces/delegation-2/"))
        return false;

    std::string proxyRequestStr = (std::string)response["getProxyReqReturn"];
    if (proxyRequestStr.empty()) {
        logger.msg(VERBOSE, "Malformed response: missing getProxyReqReturn");
        return false;
    }

    // Sign the proxy certificate
    Credential signer(proxy, "", cadir, cafile);
    std::string signedCert;
    // TODO: Hardcoded time shift - VERY BAD approach
    Time start_time = Time() - Period(300);
    Time end_time = signer.GetEndTime();
    if (end_time < start_time) {
        logger.msg(VERBOSE, "Delegatable credentials expired: %s", end_time.str());
        return false;
    }
    // CREAM is picky about end time of delegated credentials, so
    // make sure it does not exceed end time of signer
    Credential proxy_cred(start_time, end_time - start_time);
    proxy_cred.InquireRequest(proxyRequestStr);
    proxy_cred.SetProxyPolicy("gsi2", "", "", -1);
    if (!(signer.SignRequest(&proxy_cred, signedCert))) {
        logger.msg(VERBOSE, "Failed signing certificate request");
        return false;
    }
    std::string signedOutputCert, signedOutputCertChain;
    signer.OutputCertificate(signedOutputCert);
    signer.OutputCertificateChain(signedOutputCertChain);
    signedCert.append(signedOutputCert).append(signedOutputCertChain);

    action = "putProxy";
    req = PayloadSOAP(cream_ns);
    XMLNode putProxyRequest = req.NewChild("deleg:" + action);
    putProxyRequest.NewChild("delegationID") = delegation_id;
    putProxyRequest.NewChild("proxy") = signedCert;

    response = XMLNode();
    if (!process(req, response, "http://www.gridsite.org/namespaces/delegation-2/"))
        return false;
    if (!response) {
        logger.msg(VERBOSE, "Failed putting signed delegation certificate to service");
        return false;
    }
    return true;
}
// For a Stock, the default (empty) tenor is used; added later for the DLS Monte Carlo.
virtual Period tenor() const { return Period(); }
Rate YoYInflationIndex::fixing(const Date& fixingDate, bool /*forecastTodaysFixing*/) const {

    Date today = Settings::instance().evaluationDate();
    Date todayMinusLag = today - availabilityLag_;
    std::pair<Date,Date> lim = inflationPeriod(todayMinusLag, frequency_);
    Date lastFix = lim.first - 1;

    Date flatMustForecastOn = lastFix + 1;
    Date interpMustForecastOn = lastFix + 1 - Period(frequency_);

    if (interpolated() && fixingDate >= interpMustForecastOn) {
        return forecastFixing(fixingDate);
    }

    if (!interpolated() && fixingDate >= flatMustForecastOn) {
        return forecastFixing(fixingDate);
    }

    // four cases with ratio() and interpolated()
    if (ratio()) {
        if (interpolated()) {   // IS ratio, IS interpolated
            std::pair<Date,Date> lim = inflationPeriod(fixingDate, frequency_);
            Date fixMinus1Y = NullCalendar().advance(fixingDate, -1*Years, ModifiedFollowing);
            std::pair<Date,Date> limBef = inflationPeriod(fixMinus1Y, frequency_);
            Real dp = lim.second + 1 - lim.first;
            Real dpBef = limBef.second + 1 - limBef.first;
            Real dl = fixingDate - lim.first;
            // potentially does not work on 29th Feb
            Real dlBef = fixMinus1Y - limBef.first;
            // get the four relevant fixings
            // recall that they are stored flat for every day
            Rate limFirstFix = IndexManager::instance().getHistory(name())[lim.first];
            QL_REQUIRE(limFirstFix != Null<Rate>(), "Missing " << name() << " fixing for " << lim.first);
            Rate limSecondFix = IndexManager::instance().getHistory(name())[lim.second+1];
            QL_REQUIRE(limSecondFix != Null<Rate>(), "Missing " << name() << " fixing for " << lim.second+1);
            Rate limBefFirstFix = IndexManager::instance().getHistory(name())[limBef.first];
            QL_REQUIRE(limBefFirstFix != Null<Rate>(), "Missing " << name() << " fixing for " << limBef.first);
            Rate limBefSecondFix = IndexManager::instance().getHistory(name())[limBef.second+1];
            QL_REQUIRE(limBefSecondFix != Null<Rate>(), "Missing " << name() << " fixing for " << limBef.second+1);

            Real linearNow = limFirstFix + (limSecondFix - limFirstFix)*dl/dp;
            Real linearBef = limBefFirstFix + (limBefSecondFix - limBefFirstFix)*dlBef/dpBef;
            Rate wasYES = linearNow / linearBef - 1.0;

            return wasYES;

        } else {    // IS ratio, NOT interpolated
            Rate pastFixing = IndexManager::instance().getHistory(name())[fixingDate];
            QL_REQUIRE(pastFixing != Null<Rate>(), "Missing " << name() << " fixing for " << fixingDate);
            Date previousDate = fixingDate - 1*Years;
            Rate previousFixing = IndexManager::instance().getHistory(name())[previousDate];
            QL_REQUIRE(previousFixing != Null<Rate>(), "Missing " << name() << " fixing for " << previousDate);

            return pastFixing/previousFixing - 1.0;
        }

    } else {    // NOT ratio
        if (interpolated()) {   // NOT ratio, IS interpolated
            std::pair<Date,Date> lim = inflationPeriod(fixingDate, frequency_);
            Real dp = lim.second + 1 - lim.first;
            Real dl = fixingDate - lim.first;
            Rate limFirstFix = IndexManager::instance().getHistory(name())[lim.first];
            QL_REQUIRE(limFirstFix != Null<Rate>(), "Missing " << name() << " fixing for " << lim.first);
            Rate limSecondFix = IndexManager::instance().getHistory(name())[lim.second+1];
            QL_REQUIRE(limSecondFix != Null<Rate>(), "Missing " << name() << " fixing for " << lim.second+1);
            Real linearNow = limFirstFix + (limSecondFix - limFirstFix)*dl/dp;

            return linearNow;

        } else {    // NOT ratio, NOT interpolated, so just flat
            Rate pastFixing = IndexManager::instance().getHistory(name())[fixingDate];
            QL_REQUIRE(pastFixing != Null<Rate>(), "Missing " << name() << " fixing for " << fixingDate);
            return pastFixing;
        }
    }

    // QL_FAIL("YoYInflationIndex::fixing, should never get here");
}
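// A small numeric sketch (hypothetical values) of the within-period interpolation used
// above: linear = F_first + (F_second - F_first) * dl / dp, where dp is the length of the
// inflation period in days and dl the days elapsed since its start.
#include <cassert>
#include <cmath>

static double interpolateFixingSketch(double firstFix, double secondFix, double dl, double dp) {
    return firstFix + (secondFix - firstFix) * dl / dp;
}

static void interpolationCheck() {
    // e.g. 100.0 at the period start, 101.0 at the next period start,
    // 15 days into a 30-day period gives the midpoint 100.5
    double v = interpolateFixingSketch(100.0, 101.0, 15.0, 30.0);
    assert(std::fabs(v - 100.5) < 1e-12);
}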
CPICapFloorTermPriceSurface::
CPICapFloorTermPriceSurface(Real nominal,
                            Real baseRate,           // avoids an uncontrolled crash if index has no TS
                            const Period &observationLag,
                            const Calendar &cal,     // calendar in index may not be useful
                            const BusinessDayConvention &bdc,
                            const DayCounter &dc,
                            const Handle<ZeroInflationIndex>& zii,
                            const Handle<YieldTermStructure>& yts,
                            const std::vector<Rate> &cStrikes,
                            const std::vector<Rate> &fStrikes,
                            const std::vector<Period> &cfMaturities,
                            const Matrix &cPrice,
                            const Matrix &fPrice)
: InflationTermStructure(0, cal, baseRate, observationLag, zii->frequency(), zii->interpolated(), yts, dc),
  bdc_(bdc), zii_(zii), cStrikes_(cStrikes), fStrikes_(fStrikes),
  cfMaturities_(cfMaturities), cPrice_(cPrice), fPrice_(fPrice)
{
    // does the index have a TS?
    QL_REQUIRE(!zii_->zeroInflationTermStructure().empty(), "ZITS missing from index");
    QL_REQUIRE(!this->nominalTermStructure().empty(), "nominal TS missing");

    // data consistency checking, enough data?
    QL_REQUIRE(fStrikes_.size() > 1, "not enough floor strikes");
    QL_REQUIRE(cStrikes_.size() > 1, "not enough cap strikes");
    QL_REQUIRE(cfMaturities_.size() > 1, "not enough maturities");
    QL_REQUIRE(fStrikes_.size() == fPrice.rows(), "floor strikes vs floor price rows not equal");
    QL_REQUIRE(cStrikes_.size() == cPrice.rows(), "cap strikes vs cap price rows not equal");
    QL_REQUIRE(cfMaturities_.size() == fPrice.columns(), "maturities vs floor price columns not equal");
    QL_REQUIRE(cfMaturities_.size() == cPrice.columns(), "maturities vs cap price columns not equal");

    // data has correct properties (positive, monotonic)?
    for (Size j = 0; j < cfMaturities_.size(); j++) {
        QL_REQUIRE(cfMaturities[j] > Period(0, Days), "non-positive maturities");
        if (j > 0) {
            QL_REQUIRE(cfMaturities[j] > cfMaturities[j-1], "non-increasing maturities");
        }
        for (Size i = 0; i < fPrice_.rows(); i++) {
            QL_REQUIRE(fPrice_[i][j] > 0.0, "non-positive floor price: " << fPrice_[i][j]);
            if (i > 0) {
                QL_REQUIRE(fPrice_[i][j] >= fPrice_[i-1][j], "non-increasing floor prices");
            }
        }
        for (Size i = 0; i < cPrice_.rows(); i++) {
            QL_REQUIRE(cPrice_[i][j] > 0.0, "non-positive cap price: " << cPrice_[i][j]);
            if (i > 0) {
                QL_REQUIRE(cPrice_[i][j] <= cPrice_[i-1][j],
                           "non-decreasing cap prices: " << cPrice_[i][j] << " then " << cPrice_[i-1][j]);
            }
        }
    }

    // Get the set of strikes, noting that repeats, overlaps are
    // expected between caps and floors but that no overlap in the
    // output is allowed so no repeats or overlaps are used
    cfStrikes_ = std::vector<Rate>();
    for (Size i = 0; i < fStrikes_.size(); i++)
        cfStrikes_.push_back(fStrikes[i]);
    Real eps = 0.0000001;
    Rate maxFstrike = fStrikes_.back();
    for (Size i = 0; i < cStrikes_.size(); i++) {
        Rate k = cStrikes[i];
        if (k > maxFstrike + eps)
            cfStrikes_.push_back(k);
    }

    // final consistency checking
    QL_REQUIRE(cfStrikes_.size() > 2, "overall not enough strikes");
    for (Size i = 1; i < cfStrikes_.size(); i++)
        QL_REQUIRE(cfStrikes_[i] > cfStrikes_[i-1], "cfStrikes not increasing");
}
KRWibor3M(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>())
: KRWibor(Period(3, Months), h) {}
int main(int argc, char * argv[]) { // first argument - config file // second argument - filelist // using namespace std; //const int CutNumb = 8; //string CutList[CutNumb]={"No cut","Trigger","1l","lept-Veto","b-Veto","MET $>$ 50","MET $>$ 100","dPhi $>$ 1"}; // **** configuration Config cfg(argv[1]); string Channel="mutau"; // kinematic cuts on electrons bool fillplots= false; bool Wtemplate= true; const bool isData = cfg.get<bool>("IsData"); const bool applyPUreweighting = cfg.get<bool>("ApplyPUreweighting"); const bool applyPUreweighting_vertices = cfg.get<bool>("ApplyPUreweighting_vertices"); const bool applyPUreweighting_official = cfg.get<bool>("ApplyPUreweighting_official"); const bool applyLeptonSF = cfg.get<bool>("ApplyLeptonSF"); const bool InvertTauIso = cfg.get<bool>("InvertTauIso"); const bool InvertLeptonIso = cfg.get<bool>("InvertLeptonIso"); const bool InvertMET = cfg.get<bool>("InvertMET"); const double ptElectronLowCut = cfg.get<double>("ptElectronLowCut"); const double ptElectronHighCut = cfg.get<double>("ptElectronHighCut"); const double etaElectronCut = cfg.get<double>("etaElectronCut"); const double dxyElectronCut = cfg.get<double>("dxyElectronCut"); const double dzElectronCut = cfg.get<double>("dzElectronCut"); const double isoElectronLowCut = cfg.get<double>("isoElectronLowCut"); const double isoElectronHighCut = cfg.get<double>("isoElectronHighCut"); const bool applyElectronId = cfg.get<bool>("ApplyElectronId"); // vertex cuts const double ndofVertexCut = cfg.get<double>("NdofVertexCut"); const double zVertexCut = cfg.get<double>("ZVertexCut"); const double dVertexCut = cfg.get<double>("DVertexCut"); // kinematic cuts on muons const double ptMuonLowCut = cfg.get<double>("ptMuonLowCut"); const double ptMuonHighCut = cfg.get<double>("ptMuonHighCut"); const double etaMuonCut = cfg.get<double>("etaMuonCut"); const double dxyMuonCut = cfg.get<double>("dxyMuonCut"); const double dzMuonCut = cfg.get<double>("dzMuonCut"); const double isoMuonLowCut = cfg.get<double>("isoMuonLowCut"); const double isoMuonHighCut = cfg.get<double>("isoMuonHighCut"); const double isoMuonHighCutQCD = cfg.get<double>("isoMuonHighCutQCD"); const bool applyMuonId = cfg.get<bool>("ApplyMuonId"); const double ptTauLowCut = cfg.get<double>("ptTauLowCut"); const double etaTauCut = cfg.get<double>("etaTauCut"); const string dataBaseDir = cfg.get<string>("DataBaseDir"); string TrigLeg ; if (!isData) TrigLeg = cfg.get<string>("Mu17LegMC"); if (isData) TrigLeg = cfg.get<string>("Mu18LegData"); const string Mu17Tau20MuLegA = cfg.get<string>("Mu17Tau20MuLegA"); const string Mu17Tau20MuLegB = cfg.get<string>("Mu17Tau20MuLegB"); const string Mu17Tau20TauLegA = cfg.get<string>("Mu17Tau20TauLegA"); const string Mu17Tau20TauLegB = cfg.get<string>("Mu17Tau20TauLegB"); const string SingleMuonTriggerFile = cfg.get<string>("Muon17TriggerEff"); const float singleMuonTriggerPtCut = cfg.get<float>("SingleMuonTriggerPtCut"); const float singleMuonTriggerEtaCut = cfg.get<float>("SingleMuonTriggerEtaCut"); const string Region = cfg.get<string>("Region"); const string Sign = cfg.get<string>("Sign"); const double leadchargedhadrcand_dz = cfg.get<double>("leadchargedhadrcand_dz"); const double leadchargedhadrcand_dxy = cfg.get<double>("leadchargedhadrcand_dxy"); // kinematic cuts on Jets const double etaJetCut = cfg.get<double>("etaJetCut"); const double ptJetCut = cfg.get<double>("ptJetCut"); // topological cuts const double dRleptonsCutmutau = cfg.get<double>("dRleptonsCutmutau"); const double dZetaCut = 
cfg.get<double>("dZetaCut"); const double deltaRTrigMatch = cfg.get<double>("DRTrigMatch"); const bool oppositeSign = cfg.get<bool>("oppositeSign"); const bool isIsoR03 = cfg.get<bool>("IsIsoR03"); // tau const double taupt = cfg.get<double>("taupt"); const double taueta = cfg.get<double>("taueta"); const double decayModeFinding = cfg.get<double>("decayModeFinding"); const double decayModeFindingNewDMs = cfg.get<double>("decayModeFindingNewDMs"); const double againstElectronVLooseMVA5 = cfg.get<double>("againstElectronVLooseMVA5"); const double againstMuonTight3 = cfg.get<double>("againstMuonTight3"); const double vertexz = cfg.get<double>("vertexz"); const double byCombinedIsolationDeltaBetaCorrRaw3Hits = cfg.get<double>("byCombinedIsolationDeltaBetaCorrRaw3Hits"); const unsigned int RunRangeMin = cfg.get<unsigned int>("RunRangeMin"); const unsigned int RunRangeMax = cfg.get<unsigned int>("RunRangeMax"); // vertex distributions filenames and histname const string vertDataFileName = cfg.get<string>("VertexDataFileName"); const string vertMcFileName = cfg.get<string>("VertexMcFileName"); const string vertHistName = cfg.get<string>("VertexHistName"); // lepton scale factors const string muonSfDataBarrel = cfg.get<string>("MuonSfDataBarrel"); const string muonSfDataEndcap = cfg.get<string>("MuonSfDataEndcap"); const string muonSfMcBarrel = cfg.get<string>("MuonSfMcBarrel"); const string muonSfMcEndcap = cfg.get<string>("MuonSfMcEndcap"); const string jsonFile = cfg.get<string>("jsonFile"); string cmsswBase = (getenv ("CMSSW_BASE")); string fullPathToJsonFile = cmsswBase + "/src/DesyTauAnalyses/NTupleMaker/test/json/" + jsonFile; const string MuonIdIsoFile = cfg.get<string>("MuonIdIsoEff"); const string TauFakeRateFile = cfg.get<string>("TauFakeRateEff"); // Run-lumi selector std::vector<Period> periods; if (isData) { // read the good runs std::fstream inputFileStream(fullPathToJsonFile.c_str(), std::ios::in); if (inputFileStream.fail() ) { std::cout << "Error: cannot find json file " << fullPathToJsonFile << std::endl; std::cout << "please check" << std::endl; std::cout << "quitting program" << std::endl; exit(-1); } for(std::string s; std::getline(inputFileStream, s); ) { //std::fstream inputFileStream("temp", std::ios::in); periods.push_back(Period()); std::stringstream ss(s); ss >> periods.back(); } } TString MainTrigger(TrigLeg); TString Muon17Tau20MuLegA (Mu17Tau20MuLegA ); TString Muon17Tau20MuLegB (Mu17Tau20MuLegB ); TString Muon17Tau20TauLegA (Mu17Tau20TauLegA ); TString Muon17Tau20TauLegB (Mu17Tau20TauLegB ); const double Lumi = cfg.get<double>("Lumi"); const double bTag = cfg.get<double>("bTag"); const double metcut = cfg.get<double>("metcut"); CutList.clear(); CutList.push_back("No cut"); CutList.push_back("No cut after PU"); CutList.push_back("$\\mu$"); CutList.push_back("$\\tau_h$"); CutList.push_back("Trigger"); CutList.push_back("2nd $\\ell$-Veto"); CutList.push_back("3rd $\\ell$-Veto"); CutList.push_back("Lepton SF"); CutList.push_back("TauFakeRate"); CutList.push_back("topPtRwgt"); CutList.push_back("${M}_T>60"); CutList.push_back("$ E_T^{\\rm miss}>$ 100"); CutList.push_back("Jets $<$3"); CutList.push_back("b-Veto"); CutList.push_back("$40<\\rm{Inv}_M<80"); CutList.push_back("$1.5<\\Delta R<4$"); int CutNumb = int(CutList.size()); xs=1;fact=1;fact2=1; unsigned int RunMin = 9999999; unsigned int RunMax = 0; ifstream ifs("xsecs"); string line; while(std::getline(ifs, line)) // read one line from ifs { fact=fact2=1; istringstream iss(line); // access line as a stream // we only 
need the first two columns string dt,st1,st2;st1="stau2_1";st2="stau5_2"; iss >> dt >> xs >> fact >> fact2; //ifs >> dt >> xs; // no need to read further //cout<< " "<<dt<<" "<<endl; //cout<< "For sample ========================"<<dt<<" xsecs is "<<xs<<" XSec "<<XSec<<" "<<fact<<" "<<fact2<<endl; //if (dt==argv[2]) { //if (std::string::npos != dt.find(argv[2])) { if ( dt == argv[2]) { XSec= xs*fact*fact2; cout<<" Found the correct cross section "<<xs<<" for Dataset "<<dt<<" XSec "<<XSec<<endl; } /* if ( argv[2] == st1) {ChiMass=100;mIntermediate=200;} else if (argv[2] == st2) {ChiMass=200;mIntermediate=500;} */ if (isData) XSec=1.; ChiMass=0.0; } if (XSec<0&& !isData) {cout<<" Something probably wrong with the xsecs...please check - the input was "<<argv[2]<<endl;return 0;} std::vector<unsigned int> allRuns; allRuns.clear(); cout<<" ChiMass is "<<ChiMass<<" "<<mIntermediate<<endl; bool doThirdLeptVeto=true; bool doMuVeto=true; //CutList[CutNumb]=CutListt[CutNumb]; char ff[100]; sprintf(ff,"%s/%s",argv[3],argv[2]); if (applyPUreweighting_vertices and applyPUreweighting_official) {std::cout<<"ERROR: Choose only ONE PU reweighting method (vertices or official, not both!) " <<std::endl; exit(-1);} // reweighting with vertices // reading vertex weights TFile * fileDataNVert = new TFile(TString(cmsswBase)+"/src/"+dataBaseDir+"/"+vertDataFileName); TFile * fileMcNVert = new TFile(TString(cmsswBase)+"/src/"+dataBaseDir+"/"+vertMcFileName); TH1D * vertexDataH = (TH1D*)fileDataNVert->Get(TString(vertHistName)); TH1D * vertexMcH = (TH1D*)fileMcNVert->Get(TString(vertHistName)); float normVertexData = vertexDataH->GetSumOfWeights(); float normVertexMc = vertexMcH->GetSumOfWeights(); vertexDataH->Scale(1/normVertexData); vertexMcH->Scale(1/normVertexMc); PileUp * PUofficial = new PileUp(); TFile * filePUdistribution_data = new TFile(TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/PileUpDistrib/Data_Pileup_2015D_Nov17.root","read"); TFile * filePUdistribution_MC = new TFile (TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/PileUpDistrib/MC_Spring15_PU25_Startup.root", "read"); TH1D * PU_data = (TH1D *)filePUdistribution_data->Get("pileup"); TH1D * PU_mc = (TH1D *)filePUdistribution_MC->Get("pileup"); PUofficial->set_h_data(PU_data); PUofficial->set_h_MC(PU_mc); TFile *f10= new TFile(TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/"+muonSfDataBarrel); // mu SF barrel data TFile *f11 = new TFile(TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/"+muonSfDataEndcap); // mu SF endcap data TFile *f12= new TFile(TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/"+muonSfMcBarrel); // mu SF barrel MC TFile *f13 = new TFile(TString(cmsswBase)+"/src/DesyTauAnalyses/NTupleMaker/data/"+muonSfMcEndcap); // mu SF endcap MC TGraphAsymmErrors *hEffBarrelData = (TGraphAsymmErrors*)f10->Get("ZMassBarrel"); TGraphAsymmErrors *hEffEndcapData = (TGraphAsymmErrors*)f11->Get("ZMassEndcap"); TGraphAsymmErrors *hEffBarrelMC = (TGraphAsymmErrors*)f12->Get("ZMassBarrel"); TGraphAsymmErrors *hEffEndcapMC = (TGraphAsymmErrors*)f13->Get("ZMassEndcap"); double * dataEffBarrel = new double[10]; double * dataEffEndcap = new double[10]; double * mcEffBarrel = new double[10]; double * mcEffEndcap = new double[10]; dataEffBarrel = hEffBarrelData->GetY(); dataEffEndcap = hEffEndcapData->GetY(); mcEffBarrel = hEffBarrelMC->GetY(); mcEffEndcap = hEffEndcapMC->GetY(); // Lepton Scale Factors TH1D * MuSF_IdIso_Mu1H = new TH1D("MuIdIsoSF_Mu1H", "MuIdIsoSF_Mu1", 100, 0.5,1.5); ScaleFactor * 
SF_muonIdIso; if (applyLeptonSF) { SF_muonIdIso = new ScaleFactor(); SF_muonIdIso->init_ScaleFactor(TString(cmsswBase)+"/src/"+TString(MuonIdIsoFile)); } ScaleFactor * SF_muonTrigger = new ScaleFactor(); SF_muonTrigger->init_ScaleFactor(TString(cmsswBase)+"/src/"+TString(SingleMuonTriggerFile)); //////// cout<<" Will try to initialize the TFR now.... "<<endl; ScaleFactor * SF_TFR; bool applyTFR = true; if (applyTFR) { SF_TFR = new ScaleFactor(); SF_TFR->init_ScaleFactorb(TString(cmsswBase)+"/src/"+TString(TauFakeRateFile),applyTFR); } double Weight=0; int nTotalFiles = 0; int iCut=0; double CFCounter[CutNumb]; double statUnc[CutNumb]; int iCFCounter[CutNumb]; for (int i=0;i < CutNumb; i++){ CFCounter[i] = 0; iCFCounter[i] = 0; statUnc[i] =0; } // file name and tree name std::string rootFileName(argv[2]); //std::ifstream fileList(argv[2]); std::ifstream fileList(ff); //std::ifstream fileList0(argv[2]); std::ifstream fileList0(ff); std::string ntupleName("makeroottree/AC1B"); std::string initNtupleName("initroottree/AC1B"); TString era=argv[3]; TString invMuStr,invTauStr,invMETStr; if(InvertLeptonIso) invMuStr = "_InvMuIso_"; if(InvertTauIso) invTauStr = "_InvTauIso_"; if(InvertMET) invMETStr = "_InvMET_"; TString TStrName(rootFileName+invMuStr+invTauStr+invMETStr+"_"+Region+"_"+Sign); std::cout <<" The filename will be "<<TStrName <<std::endl; // output fileName with histograms TFile * file; if (isData) file = new TFile(era+"/"+TStrName+TString("_DataDriven.root"),"update"); if (!isData) file = new TFile(era+"/"+TStrName+TString(".root"),"update"); file->mkdir(Channel.c_str()); file->cd(Channel.c_str()); int nFiles = 0; int nEvents = 0; int selEvents = 0; int selEventsAllMuons = 0; int selEventsIdMuons = 0; int selEventsIsoMuons = 0; bool lumi=false; bool isLowIsoMu=false; bool isHighIsoMu = false; bool isLowIsoTau=false; bool isHighIsoTau = false; std::string dummy; // count number of files ---> while (fileList0 >> dummy) nTotalFiles++; SetupHists(CutNumb); if (argv[4] != NULL && atoi(argv[4])< nTotalFiles) nTotalFiles=atoi(argv[4]); //if (nTotalFiles>50) nTotalFiles=50; //nTotalFiles = 10; for (int iF=0; iF<nTotalFiles; ++iF) { std::string filen; fileList >> filen; std::cout << "file " << iF+1 << " out of " << nTotalFiles << " filename : " << filen << std::endl; TFile * file_ = TFile::Open(TString(filen)); TH1D * histoInputEvents = NULL; histoInputEvents = (TH1D*)file_->Get("makeroottree/nEvents"); if (histoInputEvents==NULL) continue; int NE = int(histoInputEvents->GetEntries()); for (int iE=0;iE<NE;++iE) inputEventsH->Fill(0.); std::cout << " number of input events = " << NE << std::endl; TTree * _inittree = NULL; _inittree = (TTree*)file_->Get(TString(initNtupleName)); if (_inittree==NULL) continue; Float_t genweight; if (!isData) _inittree->SetBranchAddress("genweight",&genweight); Long64_t numberOfEntriesInitTree = _inittree->GetEntries(); std::cout << " number of entries in Init Tree = " << numberOfEntriesInitTree << std::endl; for (Long64_t iEntry=0; iEntry<numberOfEntriesInitTree; iEntry++) { _inittree->GetEntry(iEntry); if (isData) histWeightsH->Fill(0.,1.); else histWeightsH->Fill(0.,genweight); } TTree * _tree = NULL; _tree = (TTree*)file_->Get(TString(ntupleName)); if (_tree==NULL) continue; Long64_t numberOfEntries = _tree->GetEntries(); std::cout << " number of entries in Tree = " << numberOfEntries << std::endl; AC1B analysisTree(_tree); // if (std::string::npos != rootFileName.find("TTJetsLO") || std::string::npos != rootFileName.find("TTPow")) //numberOfEntries = 1000; // 
numberOfEntries = 1000; for (Long64_t iEntry=0; iEntry<numberOfEntries; ++iEntry) { Float_t weight = 1; Float_t puweight = 1; //float topptweight = 1; analysisTree.GetEntry(iEntry); nEvents++; iCut = 0; //std::cout << " number of entries in Tree = " << numberOfEntries <<" starting weight "<<weight<< std::endl; if (nEvents%50000==0) cout << " processed " << nEvents << " events" << endl; if (fabs(analysisTree.primvertex_z)>zVertexCut) continue; if (analysisTree.primvertex_ndof<ndofVertexCut) continue; double dVertex = (analysisTree.primvertex_x*analysisTree.primvertex_x+ analysisTree.primvertex_y*analysisTree.primvertex_y); if (dVertex>dVertexCut) continue; if (analysisTree.primvertex_count<2) continue; //isData= false; bool lumi=false; isLowIsoMu=false; isHighIsoMu = false; isLowIsoTau=false; isHighIsoTau = false; Float_t genweights; float topPt = 0; float antitopPt = 0; bool isZTT = false; if(!isData) { /* TTree *genweightsTree = (TTree*)file_->Get("initroottree/AC1B"); genweightsTree->SetBranchAddress("genweight",&genweights); Long64_t numberOfEntriesInit = genweightsTree->GetEntries(); for (Long64_t iEntryInit=0; iEntryInit<numberOfEntriesInit; ++iEntryInit) { genweightsTree->GetEntry(iEntryInit); histWeightsH->Fill(0.,genweights); } */ /* for (unsigned int igent=0; igent < analysisTree.gentau_count; ++igent) { if (analysisTree.gentau_isPrompt[igent]) isZTT = true; } */ for (unsigned int igen=0; igen<analysisTree.genparticles_count; ++igen) { // cout<< " info = " << int(analysisTree.genparticles_count) <<" "<<int(analysisTree.genparticles_pdgid[igen])<<endl; if (analysisTree.genparticles_pdgid[igen]==6) topPt = TMath::Sqrt(analysisTree.genparticles_px[igen]*analysisTree.genparticles_px[igen]+ analysisTree.genparticles_py[igen]*analysisTree.genparticles_py[igen]); if (analysisTree.genparticles_pdgid[igen]==-6) antitopPt = TMath::Sqrt(analysisTree.genparticles_px[igen]*analysisTree.genparticles_px[igen]+ analysisTree.genparticles_py[igen]*analysisTree.genparticles_py[igen]); } weight *= analysisTree.genweight; lumi=true; //cout<<" weight from init "<<genweights<< " "<<analysisTree.genweight<<" "<<weight<<endl; /* if (applyPUreweighting) { int binNvert = vertexDataH->FindBin(analysisTree.primvertex_count); float_t dataNvert = vertexDataH->GetBinContent(binNvert); float_t mcNvert = vertexMcH->GetBinContent(binNvert); if (mcNvert < 1e-10){mcNvert=1e-10;} float_t vertWeight = dataNvert/mcNvert; weight *= vertWeight; // cout << "NVert = " << analysisTree.primvertex_count << " weight = " << vertWeight << endl; } */ } if (isData) { XSec = 1.; histRuns->Fill(analysisTree.event_run); ///////////////according to dimuons int n=analysisTree.event_run; int lum = analysisTree.event_luminosityblock; std::string num = std::to_string(n); std::string lnum = std::to_string(lum); for(const auto& a : periods) { if ( num.c_str() == a.name ) { //std::cout<< " Eureka "<<num<<" "<<a.name<<" "; // std::cout <<"min "<< last->lower << "- max last " << last->bigger << std::endl; for(auto b = a.ranges.begin(); b != std::prev(a.ranges.end()); ++b) { // cout<<b->lower<<" "<<b->bigger<<endl; if (lum >= b->lower && lum <= b->bigger ) lumi = true; } auto last = std::prev(a.ranges.end()); // std::cout <<"min "<< last->lower << "- max last " << last->bigger << std::endl; if ( (lum >=last->lower && lum <= last->bigger )) lumi=true; } } if (!lumi) continue; //if (lumi ) cout<<" ============= Found good run"<<" "<<n<<" "<<lum<<endl; } if (analysisTree.event_run<RunMin) RunMin = analysisTree.event_run; if 
(analysisTree.event_run>RunMax) RunMax = analysisTree.event_run; //std::cout << " Run : " << analysisTree.event_run << std::endl; bool isNewRun = true; if (allRuns.size()>0) { for (unsigned int iR=0; iR<allRuns.size(); ++iR) { if (analysisTree.event_run==allRuns.at(iR)) { isNewRun = false; break; } } } if (isNewRun) allRuns.push_back(analysisTree.event_run); if (!lumi) continue; JetsMV.clear(); ElMV.clear(); TauMV.clear(); MuMV.clear(); LeptMV.clear(); mu_index=-1; tau_index=-1; el_index=-1; double MET = sqrt ( analysisTree.pfmet_ex*analysisTree.pfmet_ex + analysisTree.pfmet_ey*analysisTree.pfmet_ey); METV.SetPx(analysisTree.pfmet_ex); METV.SetPy(analysisTree.pfmet_ey); METV.SetPz(analysisTree.pfmet_ez); METV.SetPhi(analysisTree.pfmet_phi); if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; for (unsigned int ijj = 0; ijj<analysisTree.pfjet_count; ++ijj) { JetsV.SetPxPyPzE(analysisTree.pfjet_px[ijj], analysisTree.pfjet_py[ijj], analysisTree.pfjet_pz[ijj], analysisTree.pfjet_e[ijj]); JetsMV.push_back(JetsV); } for (unsigned int imm = 0; imm<analysisTree.muon_count; ++imm) { MuV.SetPtEtaPhiM(analysisTree.muon_pt[imm], analysisTree.muon_eta[imm], analysisTree.muon_phi[imm], muonMass); MuMV.push_back(MuV); // mu_index=0; } for (unsigned int ie = 0; ie<analysisTree.electron_count; ++ie) { ElV.SetPtEtaPhiM(analysisTree.electron_pt[ie], analysisTree.electron_eta[ie], analysisTree.electron_phi[ie], electronMass); ElMV.push_back(ElV); // el_index=0; } for (unsigned int itt = 0; itt<analysisTree.tau_count; ++itt) { TauV.SetPtEtaPhiM(analysisTree.tau_pt[itt], analysisTree.tau_eta[itt], analysisTree.tau_phi[itt], tauMass); TauMV.push_back(TauV); // tau_index=0; } if (!isData ) { if (applyPUreweighting) { puweight = float(PUofficial->get_PUweight(double(analysisTree.numtruepileupinteractions))); weight *=puweight; } } // vector <string> ss; ss.push_back(.c_str()); if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; //selecTable.Fill(1,0, weight ); bool trigAccept = false; unsigned int nMainTrigger = 0; bool isMainTrigger = false; unsigned int nfilters = analysisTree.run_hltfilters->size(); // std::cout << "nfiltres = " << nfilters << std::endl; for (unsigned int i=0; i<nfilters; ++i) { // std::cout << "HLT Filter : " << i << " = " << analysisTree.run_hltfilters->at(i) << std::endl; TString HLTFilter(analysisTree.run_hltfilters->at(i)); if (HLTFilter==MainTrigger) { nMainTrigger = i; isMainTrigger = true; } } if (!isMainTrigger) { std::cout << "HLT filter for Mu20 " << MainTrigger << " not found" << std::endl; return(-1); } /////now clear the Mu.El.Jets again to fill them again after cleaning MuMV.clear(); ElMV.clear(); TauMV.clear(); LeptMV.clear(); double isoMuMin = 9999; bool mu_iso=false; vector<int> muons; muons.clear(); for (unsigned int im = 0; im<analysisTree.muon_count; ++im) { if (analysisTree.muon_pt[im]<ptMuonLowCut) continue; if (fabs(analysisTree.muon_eta[im])>etaMuonCut) continue; if (fabs(analysisTree.muon_dxy[im])>dxyMuonCut) continue; if (fabs(analysisTree.muon_dz[im])>dzMuonCut) continue; double absIso= analysisTree.muon_r03_sumChargedHadronPt[im] + max(analysisTree.muon_r03_sumNeutralHadronEt[im] + analysisTree.muon_r03_sumPhotonEt[im] - 0.5 * analysisTree.muon_r03_sumPUPt[im],0.0); double 
relIso = absIso/analysisTree.muon_pt[im]; if (relIso<isoMuonLowCut) continue; if (applyMuonId && !analysisTree.muon_isMedium[im]) continue; //cout<<" after muIso index "<<int(mu_index)<<" pT "<<analysisTree.muon_pt[im]<<" relIso "<<relIso<<" isoMuMin "<<isoMuMin<<" muon_count "<<analysisTree.muon_count<<" im "<<im<<" event "<<iEntry<<endl; if (double(relIso)<double(isoMuMin)) { isoMuMin = relIso; mu_index = int(im); mu_iso=true; //cout<<" after muIso index "<<int(mu_index)<<" pT "<<analysisTree.muon_pt[im]<<" relIso "<<relIso<<" isoMuMin "<<isoMuMin<<" muon_count "<<analysisTree.muon_count<<" im "<<im<<" event "<<iEntry<<endl; muons.push_back(im); MuV.SetPtEtaPhiM(analysisTree.muon_pt[mu_index], analysisTree.muon_eta[mu_index], analysisTree.muon_phi[mu_index], muonMass); MuMV.push_back(MuV); LeptMV.push_back(MuV); } //cout<<" Indexes here "<<im<<" "<<mu_index<<endl; if (relIso == isoMuMin && im != mu_index) { //cout<<" found a pair for muons " <<relIso <<" mu_index "<<mu_index<<" pT "<<analysisTree.muon_pt[int(mu_index)]<<" new index "<<im<<" pT "<<analysisTree.muon_pt[int(im)]<<" event "<<iEntry<<endl; analysisTree.muon_pt[im] > analysisTree.muon_pt[mu_index] ? mu_index = int(im) : mu_index = mu_index; } } if (muons.size()==0 || !mu_iso ) continue; double absIso= analysisTree.muon_r03_sumChargedHadronPt[mu_index] + max(analysisTree.muon_r03_sumNeutralHadronEt[mu_index] + analysisTree.muon_r03_sumPhotonEt[mu_index] - 0.5 * analysisTree.muon_r03_sumPUPt[mu_index],0.0); double relIso = absIso/analysisTree.muon_pt[mu_index]; if (relIso>isoMuonHighCut && !InvertLeptonIso) continue; if (relIso>isoMuonHighCutQCD ) { isHighIsoMu=true ;isLowIsoMu=false;} else { isHighIsoMu = false;isLowIsoMu=true;} sort(LeptMV.begin(), LeptMV.end(),ComparePt); if (LeptMV.size() == 0 ) continue; if (InvertLeptonIso && !isHighIsoMu) continue; if (!InvertLeptonIso && isHighIsoMu) continue; if (InvertLeptonIso && isLowIsoMu) continue; //cout<<" Iso check "<<relIso<<" InvertLeptonIso "<<InvertLeptonIso<<" isHighIsoMu "<<isHighIsoMu<<" isLowIsoMu "<<isLowIsoMu<<" cutQCD "<<isoMuonHighCutQCD<<endl; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; double isoTauMin = 999; bool tau_iso = false; vector<int> tau; tau.clear(); for (unsigned int it = 0; it<analysisTree.tau_count; ++it) { if (analysisTree.tau_pt[it] < ptTauLowCut || fabs(analysisTree.tau_eta[it])> etaTauCut) continue; if (analysisTree.tau_decayModeFindingNewDMs[it]<decayModeFindingNewDMs) continue; if ( fabs(analysisTree.tau_leadchargedhadrcand_dz[it])> leadchargedhadrcand_dz) continue; if (analysisTree.tau_againstElectronVLooseMVA5[it]<againstElectronVLooseMVA5) continue; if (analysisTree.tau_againstMuonTight3[it]<againstMuonTight3) continue; //cout<<" "<<analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[it]<<endl; if (!InvertTauIso && analysisTree.tau_byCombinedIsolationDeltaBetaCorrRaw3Hits[it] > byCombinedIsolationDeltaBetaCorrRaw3Hits ) continue; //if (!InvertTauIso && analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[it] < 0.5 ) continue; double tauIso = analysisTree.tau_byCombinedIsolationDeltaBetaCorrRaw3Hits[it]; if (tauIso<isoTauMin ) { // cout<<" there was a chenge "<<tauIso<<" "<<isoTauMin<<" it "<<it<<" tau_index "<<tau_index<<" "<<analysisTree.tau_count<<endl; isoTauMin = tauIso; tau_iso=true; tau_index = (int)it; tau.push_back(tau_index); 
TauV.SetPtEtaPhiM(analysisTree.tau_pt[tau_index], analysisTree.tau_eta[tau_index], analysisTree.tau_phi[tau_index], tauMass); TauMV.push_back(TauV); } continue; if (tauIso==isoTauMin && it != tau_index) { //analysisTree.tau_pt[it] > analysisTree.tau_pt[tau_index] ? tau_index = it : tau_index = tau_index; if (analysisTree.tau_pt[it] > analysisTree.tau_pt[tau_index] ) tau_index = (int)it ; //cout<<" found a pair " <<tauIso <<" "<<tau_index<<" "<<it<<endl; } } if (tau.size()==0 || !tau_iso ) continue; // cout<< " Lets check "<<mu_index <<" "<<tau_index <<" "<<endl; //cout<<" "<<endl; ////////////////////change to new tau inverted definition double tauIsoI = analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[tau_index]; if (tauIsoI > 0.5 && InvertTauIso) {isHighIsoTau =true;} //else {isHighIsoTau =false ; isLowIsoTau=true;} //if (isHighIsoTau && tauIso > 2*byCombinedIsolationDeltaBetaCorrRaw3Hits ) continue; if (InvertTauIso && !isHighIsoTau) continue; if (!InvertTauIso && isHighIsoTau) continue; //if (InvertTauIso && isLowIsoTau) continue; /* continue; double isoTauMin = 999; bool tau_iso = false; vector<int> tau; tau.clear(); for (unsigned int it = 0; it<analysisTree.tau_count; ++it) { if (analysisTree.tau_pt[it] < ptTauLowCut || fabs(analysisTree.tau_eta[it])> etaTauCut) continue; if (analysisTree.tau_decayModeFindingNewDMs[it]<decayModeFindingNewDMs) continue; if ( fabs(analysisTree.tau_leadchargedhadrcand_dz[it])> leadchargedhadrcand_dz) continue; if (analysisTree.tau_againstElectronVLooseMVA5[it]<againstElectronVLooseMVA5) continue; if (analysisTree.tau_againstMuonTight3[it]<againstMuonTight3) continue; //if (!InvertTauIso && analysisTree.tau_byCombinedIsolationDeltaBetaCorrRaw3Hits[it] > byCombinedIsolationDeltaBetaCorrRaw3Hits ) continue; cout<<" "<<analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[it]<<endl; //aif (!InvertTauIso && analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[it] < 0.5 ) continue; double tauIso = analysisTree.tau_byCombinedIsolationDeltaBetaCorrRaw3Hits[it]; if (tauIso<isoTauMin ) { // cout<<" there was a chenge "<<tauIso<<" "<<isoTauMin<<" it "<<it<<" tau_index "<<tau_index<<" "<<analysisTree.tau_count<<endl; isoTauMin = tauIso; tau_iso=true; tau_index = int(it); tau.push_back(tau_index); TauV.SetPtEtaPhiM(analysisTree.tau_pt[tau_index], analysisTree.tau_eta[tau_index], analysisTree.tau_phi[tau_index], tauMass); TauMV.push_back(TauV); } if (tauIso==isoTauMin && it != tau_index) { analysisTree.tau_pt[it] > analysisTree.tau_pt[tau_index] ? 
tau_index = int(it) : tau_index = tau_index; //cout<<" found a pair " <<tauIso <<" "<<tau_index<<" "<<it<<endl; } } if (tau.size()==0 || !tau_iso ) continue; double tauIsoI = analysisTree.tau_byMediumCombinedIsolationDeltaBetaCorr3Hits[tau_index]; if (tauIsoI > 0.5 && InvertTauIso) {isHighIsoTau =true;} //else {isHighIsoTau =false ; isLowIsoTau=true;} //if (isHighIsoTau && tauIso > 2*byCombinedIsolationDeltaBetaCorrRaw3Hits ) continue; if (InvertTauIso && !isHighIsoTau) continue; if (!InvertTauIso && isHighIsoTau) continue; //if (InvertTauIso && isLowIsoTau) continue; */ double q = analysisTree.tau_charge[tau_index] * analysisTree.muon_charge[mu_index]; if (q>0 && Sign=="OS" ) continue; if (q<0 && Sign=="SS" ) continue; bool regionB = (q<0 && isLowIsoMu); bool regionA = (q>0 && isLowIsoMu); bool regionC = (q<0 && isHighIsoMu); bool regionD = (q>0 && isHighIsoMu); if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; //cout<<" HOW MANY MUONS DO I HAVE ?? "<<muons.size()<<endl; bool isdRLeptonMatched = false; for (unsigned int iT=0; iT<analysisTree.trigobject_count; ++iT) { if (analysisTree.trigobject_filters[iT][nMainTrigger]) { // Mu17 Leg double dRtrig = deltaR(analysisTree.muon_eta[mu_index],analysisTree.muon_phi[mu_index], analysisTree.trigobject_eta[iT],analysisTree.trigobject_phi[iT]); if (!isData && analysisTree.trigobject_filters[iT][nMainTrigger] && analysisTree.trigobject_pt[iT]>singleMuonTriggerPtCut && dRtrig<deltaRTrigMatch) isdRLeptonMatched = true; if (isData && dRtrig<deltaRTrigMatch) isdRLeptonMatched=true; } } if (!isdRLeptonMatched) continue; double dR = deltaR(analysisTree.tau_eta[tau_index],analysisTree.tau_phi[tau_index], analysisTree.muon_eta[mu_index],analysisTree.muon_phi[mu_index]); if (dR<dRleptonsCutmutau) continue; double ptMu1 = (double)analysisTree.muon_pt[mu_index]; double etaMu1 = (double)analysisTree.muon_eta[mu_index]; float trigweight=1.; float Mu17EffData = (float)SF_muonTrigger->get_EfficiencyData(double(ptMu1),double(etaMu1)); float Mu17EffMC = (float)SF_muonTrigger->get_EfficiencyMC(double(ptMu1),double(etaMu1)); if (!isData) { if (Mu17EffMC>1e-6) trigweight = Mu17EffData / Mu17EffMC; weight *= trigweight; // cout<<" Trigger weight "<<trigweight<<endl; } if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; //Set this flag if there is an opposite-charge muon pair in the event with muons separated by DR>0.15 and both passing the loose selection: bool MuVeto=false; if (doMuVeto){ if (muons.size()>1){ for (unsigned int imv = 0; imv<analysisTree.muon_count; ++imv) { if ( imv != mu_index ){ double absIso= analysisTree.muon_r03_sumChargedHadronPt[imv] + max(analysisTree.muon_r03_sumNeutralHadronEt[imv] + analysisTree.muon_r03_sumPhotonEt[imv] - 0.5 * analysisTree.muon_r03_sumPUPt[imv],0.0); double relIso = absIso/analysisTree.muon_pt[imv]; double dRr = deltaR(analysisTree.muon_eta[mu_index],analysisTree.muon_phi[mu_index], analysisTree.muon_eta[imv],analysisTree.muon_phi[imv]); bool OSCharge = false; if ( imv != mu_index && analysisTree.muon_charge[imv] != analysisTree.muon_charge[mu_index] ) OSCharge=true; //if ( analysisTree.muon_charge[imv] != analysisTree.muon_charge[mu_index] && analysisTree.muon_isGlobal[imv] && analysisTree.muon_isTracker[imv] && 
analysisTree.muon_isPF[imv] if ( analysisTree.muon_charge[imv] != analysisTree.muon_charge[mu_index] && analysisTree.muon_isGlobal[imv] && analysisTree.muon_isTracker[imv] && analysisTree.muon_isPF[imv] && analysisTree.muon_pt[imv]> 15 && fabs(analysisTree.muon_eta[imv])< 2.4 && fabs(analysisTree.muon_dxy[imv])<0.045 && fabs(analysisTree.muon_dz[imv] < 0.2 && relIso< 0.3 && analysisTree.muon_isMedium[imv]) && dRr > 0.15 && OSCharge) //removed from last recipe MuVeto=true; } } } } if (MuVeto) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; bool ThirdLeptVeto=false; if (doThirdLeptVeto){ if (analysisTree.electron_count>0) { for (unsigned int iev = 0; iev<analysisTree.electron_count; ++iev) { /* double neutralIsoV = analysisTree.electron_r03_sumNeutralHadronEt[iev] + analysisTree.electron_r03_sumNeutralHadronEt[iev] + analysisTree.electron_r03_sumPhotonEt[iev] - 4*TMath::Pi()*(0.3*0.3)*analysisTree.rho; double IsoWithEA = analysisTree.electron_r03_sumChargedHadronPt[iev] + TMath::Max(double(0), neutralIsoV); */ double IsoWithEA = analysisTree.electron_r03_sumChargedHadronPt[iev] + max(analysisTree.electron_r03_sumNeutralHadronEt[iev] + analysisTree.electron_r03_sumPhotonEt[iev] - 0.5 * analysisTree.electron_r03_sumPUPt[iev], 0.0) ; double relIsoV = IsoWithEA/analysisTree.electron_pt[iev]; bool electronMvaId = electronMvaIdWP90(analysisTree.electron_pt[iev], analysisTree.electron_superclusterEta[iev], analysisTree.electron_mva_id_nontrigPhys14[iev]); if ( iev != el_index && analysisTree.electron_pt[iev] > 10 && fabs(analysisTree.electron_eta[iev]) < 2.5 && fabs(analysisTree.electron_dxy[iev])<0.045 && fabs(analysisTree.electron_dz[iev]) < 0.2 && relIsoV< 0.3 && electronMvaId && analysisTree.electron_pass_conversion[iev] && analysisTree.electron_nmissinginnerhits[iev] <=1) ThirdLeptVeto=true; } } if (analysisTree.muon_count>0){ for (unsigned int imvv = 0; imvv<analysisTree.muon_count; ++imvv) { // if ( imvv != mu_index && analysisTree.muon_charge[imvv] != analysisTree.muon_charge[mu_index] ){ double absIso= analysisTree.muon_r03_sumChargedHadronPt[imvv] + max(analysisTree.muon_r03_sumNeutralHadronEt[imvv] + analysisTree.muon_r03_sumPhotonEt[imvv] - 0.5 * analysisTree.muon_r03_sumPUPt[imvv],0.0); double relIso = absIso/analysisTree.muon_pt[imvv]; if ( imvv != mu_index && analysisTree.muon_isMedium[imvv] && analysisTree.muon_pt[imvv]> 10 && fabs(analysisTree.muon_eta[imvv])< 2.4 && fabs(analysisTree.muon_dxy[imvv])<0.045 && fabs(analysisTree.muon_dz[imvv] < 0.2 && relIso< 0.3 && analysisTree.muon_isMedium[imvv]) ) ThirdLeptVeto=true; } } } if (ThirdLeptVeto) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; if (!isData && applyLeptonSF) { //leptonSFweight = SF_yourScaleFactor->get_ScaleFactor(pt, eta) double ptMu1 = (double)analysisTree.muon_pt[mu_index]; double etaMu1 = (double)analysisTree.muon_eta[mu_index]; double IdIsoSF_mu1 = SF_muonIdIso->get_ScaleFactor(ptMu1, etaMu1); MuSF_IdIso_Mu1H->Fill(IdIsoSF_mu1); weight = weight*IdIsoSF_mu1; } if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; TLorentzVector muVc ; 
muVc.SetPtEtaPhiM(analysisTree.muon_pt[mu_index], analysisTree.muon_eta[mu_index], analysisTree.muon_phi[mu_index], muonMass); TLorentzVector tauVc; tauVc.SetPtEtaPhiM(analysisTree.tau_pt[tau_index], analysisTree.tau_eta[tau_index], analysisTree.tau_phi[tau_index], tauMass); double MTv = mT(muVc,METV); if (!isData && applyTFR) { //leptonSFweight = SF_yourScaleFactor->get_ScaleFactor(pt, eta) double ptTau1 = (double)analysisTree.tau_pt[tau_index]; double etaTau1 = (double)analysisTree.tau_eta[tau_index]; double TFRSF_mu1 = SF_TFR->get_ScaleFactor(ptTau1, etaTau1); MuSF_IdIso_Mu1H->Fill(TFRSF_mu1); weight = weight*TFRSF_mu1; //cout<<" "<<TFRSF_mu1<<" for eta "<<etaTau1<< " pT "<< ptTau1<<endl; } if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; if (!isData && ( string::npos != filen.find("TTJets") || string::npos != filen.find("TTPowHeg")) ) //if (!isData ) { if (topPt>0.&&antitopPt>0.) { float topptweight = topPtWeight(topPt,antitopPt); // cout<<" "<<topPt<<" "<<antitopPt<<endl; weight *= topptweight; } } if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; if (MTv<60 ) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; // for (unsigned int j=0;j<LeptMV.size();++j) cout<<" j "<<j<<" "<<LeptMV.at(j).Pt()<<endl; // cout<<""<<endl; ////////jets cleaning vector<int> jets; jets.clear(); TLorentzVector leptonsV, muonJ, jetsLV; // continue; //JetsV.SetPxPyPzE(analysisTree.pfjet_px[ij], analysisTree.pfjet_py[ij], analysisTree.pfjet_pz[ij], analysisTree.pfjet_e[ij]); //double ETmiss = TMath::Sqrt(analysisTree.pfmet_ex*analysisTree.pfmet_ex + analysisTree.pfmet_ey*analysisTree.pfmet_ey); double ETmiss = METV.Pt();//TMath::Sqrt(analysisTree.pfmet_ex*analysisTree.pfmet_ex + analysisTree.pfmet_ey*analysisTree.pfmet_ey); if (InvertMET && ETmiss > 100. ) continue; if (!InvertMET && ETmiss < 100. 
) continue; //that is the nominal selection ie MET > 100 if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; double ptScalarSum = -1; bool btagged= false; JetsMV.clear(); float jetEtaCut = 2.4; float DRmax = 0.5; int countjets = 0; for (unsigned int jet=0; jet<analysisTree.pfjet_count; ++jet) { float absJetEta = fabs(analysisTree.pfjet_eta[jet]); if (absJetEta > etaJetCut) continue; if (fabs(analysisTree.pfjet_pt[jet])<ptJetCut) continue; //double Dr= deltaR(LeptMV.at(il).Eta(), LeptMV.at(il).Phi(), bool isPFJetId = false ; isPFJetId =looseJetiD(analysisTree,jet); if (!isPFJetId) continue; //for (unsigned int lep=0;LeptMV.size();lep++){ //double Dr=(LeptMV.at(lep).Eta(),LeptMV.at(lep).Phi(), double Dr=deltaR(analysisTree.muon_eta[mu_index],analysisTree.muon_phi[mu_index], analysisTree.pfjet_eta[jet],analysisTree.pfjet_phi[jet]); if ( Dr < DRmax) continue; double Drr=deltaR(analysisTree.tau_eta[tau_index],analysisTree.tau_phi[tau_index], analysisTree.pfjet_eta[jet],analysisTree.pfjet_phi[jet]); if ( Drr < DRmax) continue; if (analysisTree.pfjet_btag[jet][0] > bTag) btagged = true; JetsV.SetPxPyPzE(analysisTree.pfjet_px[jet], analysisTree.pfjet_py[jet], analysisTree.pfjet_pz[jet], analysisTree.pfjet_e[jet]); JetsMV.push_back(JetsV); countjets++; } if (countjets >2 ) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; if (btagged ) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; // pt Scalar //cout<<" "<<mu_index<<" "<<tau_index<<" "<<MuMV.at(mu_index).M()<<" "<<TauMV.at(tau_index).M()<<endl; TLorentzVector diL = muVc + tauVc; if ( diL.M() < 100 ) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; /* if (ETmiss < 100) continue; if (ETmiss < 120) continue; FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; // topological cut //if (DZeta<dZetaCut) continue; */ //double dRr = deltaR(diL.Eta(), diL.Phi(), METV.Eta(), METV.Phi()); double dRr = deltaR(muVc.Eta(), muVc.Phi(), tauVc.Eta(), tauVc.Phi()); if (dRr>3 ) continue; if(fillplots) FillMainHists(iCut, weight, ElMV, MuMV, TauMV,JetsMV,METV, ChiMass,mIntermediate,analysisTree, Channel, mu_index,el_index,tau_index); CFCounter[iCut]+= weight; iCFCounter[iCut]++; iCut++; FillTree(); selEvents++; } // end of file processing (loop over events in one file) nFiles++; delete _tree; file_->Close(); delete file_; } cout<<"done"<<endl; cout<<" Total events "<<nEvents<<" Will use weight "<<histWeightsH->GetSumOfWeights()<<" Norm Factor for a Lumi of "<<Lumi<<"/pb is "<<XSec*Lumi/( histWeightsH->GetSumOfWeights())<<endl; cout<<" First content "<<CFCounter[0]<<endl; cout<<" Run range from -----> "<<RunMin<<" to "<<RunMax<<endl; /* for (int i=0;i<CutNumb;++i){ CFCounter[i] *= double(XSec*Lumi/( histWeights->GetSumOfWeights())); if (iCFCounter[i] <0.2) statUnc[i] =0; else statUnc[i] = 
CFCounter[i]/sqrt(iCFCounter[i]); } */ //write out cutflow ofstream tfile; // TString outname = argv[argc-1]; TString outname=argv[2]; TString textfilename = "cutflow_"+outname+"_"+Channel+"_"+argv[3]+".txt"; // tfile.open(textfilename); // tfile << "########################################" << endl; for(int ci = 0; ci < CutNumb; ci++) { // tfile << CutList[ci]<<"\t & \t" // << CFCounter[ci] <<"\t & \t"<< statUnc[ci] <<"\t & \t"<< iCFCounter[ci] << endl; CutFlowUnW->SetBinContent(1+ci,0); CutFlow->SetBinContent(1+ci,0); CutFlowUnW->SetBinContent(1+ci,float(CFCounter[ci]) ); CFCounter[ci] *= double(XSec*Lumi/( histWeightsH->GetSumOfWeights())); CutFlow->SetBinContent(1+ci,float(CFCounter[ci])); cout << " i "<<ci<<" "<<iCFCounter[ci]<<" "<<XSec*Lumi/( histWeightsH->GetSumOfWeights())<<" "<<CutFlowUnW->GetBinContent(1+ci)<<" "<<CutFlow->GetBinContent(1+ci)<<endl; if (iCFCounter[ci] <0.2) statUnc[ci] =0; //else statUnc[i] = CFCounter[i]/sqrt(iCFCounter[i]); else statUnc[ci] = sqrt(CFCounter[ci]); } //ofstream tfile1; //TString textfile_Con = "CMG_cutflow_Con_Mu_"+outname+".txt"; //tfile1.open(textfile_Con); //tfile1 << "########################################" << endl; //tfile << "Cut efficiency numbers:" << endl; // tfile << " Cut "<<"\t & \t"<<"#Evnts for "<<Lumi/1000<<" fb-1 & \t"<<" Uncertainty \t"<<" cnt\t"<<endl; // tfile.close(); std::cout << std::endl; int allEvents = int(inputEventsH->GetEntries()); std::cout << "Total number of input events = " << allEvents << std::endl; std::cout << "Total number of events in Tree = " << nEvents << std::endl; std::cout << "Total number of selected events = " << selEvents << std::endl; std::cout << std::endl; file->cd(Channel.c_str()); WriteTree(); hxsec->Fill(XSec); hxsec->Write(); inputEventsH->Write(); histWeightsH->Write(); histRuns->Write(); CutFlowUnW->Write(); CutFlow->Write(); MuSF_IdIso_Mu1H->Write(); file->Write(); file->Close(); delete file; }
void americanSwaption(boost::shared_ptr<LiborForwardModel> & lfm,
                      boost::shared_ptr<IborIndex> & libor,
                      utilities::csvBuilder & file) {

    Date pricingDate =                                    // pricing date
        Settings::instance().evaluationDate();
    Date optionStart = libor->fixingCalendar().advance(   // start in 2 days
        pricingDate, Period(2, Days));
    Date optionEnd(16, July, 2016);
    Date fwdMaturity(16, July, 2021);
    //Date optionEnd = libor->fixingCalendar().advance(   // start in 2 days
    //    optionStart,
    //    Period(6, Months));
    //Date fwdMaturity = optionStart + Period(3, Years);  // underlying 3 years

    Schedule schedule(optionStart, fwdMaturity, libor->tenor(),
                      libor->fixingCalendar(),
                      ModifiedFollowing, ModifiedFollowing,
                      DateGeneration::Backward, false);

    Rate swapRate = 0.0404;                               // dummy swap rate

    boost::shared_ptr<VanillaSwap> forwardSwap(
        new VanillaSwap(VanillaSwap::Receiver, 100.0,
                        schedule, swapRate, ActualActual(),
                        schedule, libor, 0.0, libor->dayCounter()));
    forwardSwap->setPricingEngine(boost::shared_ptr<PricingEngine>(
        new DiscountingSwapEngine(libor->forwardingTermStructure())));

    swapRate = forwardSwap->fairRate();                   // obtain the fair rate

    forwardSwap = boost::shared_ptr<VanillaSwap>(         // rebuild the "right" swap
        new VanillaSwap(VanillaSwap::Receiver, 100.0,
                        schedule, swapRate, ActualActual(),
                        schedule, libor, 0.0, libor->dayCounter()));
    forwardSwap->setPricingEngine(boost::shared_ptr<PricingEngine>(
        new DiscountingSwapEngine(libor->forwardingTermStructure())));

    boost::shared_ptr<PricingEngine> engine(
        new LfmSwaptionEngine(lfm, libor->forwardingTermStructure()));
    boost::shared_ptr<Exercise> exercise(
        new AmericanExercise(optionEnd));

    boost::shared_ptr<Swaption> americanSwaption(         // create the swaption
        new Swaption(forwardSwap, exercise));
    americanSwaption->setPricingEngine(engine);

    Real npv = americanSwaption->NPV();

    std::cout << "American swaption npv: "                // information
              << npv << std::endl;
}
void IntegralCdsEngine::calculate() const {
    QL_REQUIRE(integrationStep_ != Period(), "null period set");
    QL_REQUIRE(!discountCurve_.empty(), "no discount term structure set");
    QL_REQUIRE(!probability_.empty(), "no probability term structure set");

    Date today = Settings::instance().evaluationDate();
    Date settlementDate = discountCurve_->referenceDate();

    // Upfront Flow NPV. Either we are on-the-run (no flow)
    // or we are forward start
    Real upfPVO1 = 0.0;
    if (!arguments_.upfrontPayment->hasOccurred(
            settlementDate, includeSettlementDateFlows_)) {
        // date determining the probability survival so we have to pay
        // the upfront (did not knock out)
        Date effectiveUpfrontDate =
            arguments_.protectionStart > probability_->referenceDate() ?
                arguments_.protectionStart : probability_->referenceDate();
        upfPVO1 =
            probability_->survivalProbability(effectiveUpfrontDate) *
            discountCurve_->discount(arguments_.upfrontPayment->date());
    }
    results_.upfrontNPV = upfPVO1 * arguments_.upfrontPayment->amount();

    results_.couponLegNPV = 0.0;
    results_.defaultLegNPV = 0.0;
    for (Size i=0; i<arguments_.leg.size(); ++i) {
        if (arguments_.leg[i]->hasOccurred(settlementDate,
                                           includeSettlementDateFlows_))
            continue;

        boost::shared_ptr<FixedRateCoupon> coupon =
            boost::dynamic_pointer_cast<FixedRateCoupon>(arguments_.leg[i]);

        // In order to avoid a few switches, we calculate the NPV
        // of both legs as a positive quantity. We'll give them
        // the right sign at the end.

        Date paymentDate = coupon->date(),
             startDate = (i == 0 ? arguments_.protectionStart :
                                   coupon->accrualStartDate()),
             endDate = coupon->accrualEndDate();
        Date effectiveStartDate =
            (startDate <= today && today <= endDate) ? today : startDate;
        Real couponAmount = coupon->amount();

        Probability S = probability_->survivalProbability(paymentDate);

        // On one side, we add the fixed rate payments in case of
        // survival.
        results_.couponLegNPV +=
            S * couponAmount * discountCurve_->discount(paymentDate);

        // On the other side, we add the payment (and possibly the
        // accrual) in case of default.

        Period step = integrationStep_;
        Date d0 = effectiveStartDate;
        Date d1 = std::min(d0 + step, endDate);
        Probability P0 = probability_->defaultProbability(d0);
        DiscountFactor endDiscount = discountCurve_->discount(paymentDate);
        do {
            DiscountFactor B =
                arguments_.paysAtDefaultTime ?
                    discountCurve_->discount(d1) : endDiscount;

            Probability P1 = probability_->defaultProbability(d1);
            Probability dP = P1 - P0;

            // accrual...
            if (arguments_.settlesAccrual) {
                if (arguments_.paysAtDefaultTime)
                    results_.couponLegNPV +=
                        coupon->accruedAmount(d1) * B * dP;
                else
                    results_.couponLegNPV += couponAmount * B * dP;
            }

            // ...and claim.
            Real claim = arguments_.claim->amount(d1,
                                                  arguments_.notional,
                                                  recoveryRate_);
            results_.defaultLegNPV += claim * B * dP;

            // setup for next time around the loop
            P0 = P1;
            d0 = d1;
            d1 = std::min(d0 + step, endDate);
        } while (d0 < endDate);
    }

    Real upfrontSign = 1.0;
    switch (arguments_.side) {
      case Protection::Seller:
        results_.defaultLegNPV *= -1.0;
        break;
      case Protection::Buyer:
        results_.couponLegNPV *= -1.0;
        results_.upfrontNPV *= -1.0;
        upfrontSign = -1.0;
        break;
      default:
        QL_FAIL("unknown protection side");
    }

    results_.value =
        results_.defaultLegNPV + results_.couponLegNPV + results_.upfrontNPV;
    results_.errorEstimate = Null<Real>();

    if (results_.couponLegNPV != 0.0) {
        results_.fairSpread =
            -results_.defaultLegNPV*arguments_.spread/results_.couponLegNPV;
    } else {
        results_.fairSpread = Null<Rate>();
    }

    Real upfrontSensitivity = upfPVO1 * arguments_.notional;
    if (upfrontSensitivity != 0.0) {
        results_.fairUpfront =
            -upfrontSign*(results_.defaultLegNPV + results_.couponLegNPV)
            / upfrontSensitivity;
    } else {
        results_.fairUpfront = Null<Rate>();
    }

    static const Rate basisPoint = 1.0e-4;

    if (arguments_.spread != 0.0) {
        results_.couponLegBPS =
            results_.couponLegNPV*basisPoint/arguments_.spread;
    } else {
        results_.couponLegBPS = Null<Rate>();
    }

    if (arguments_.upfront && *arguments_.upfront != 0.0) {
        results_.upfrontBPS =
            results_.upfrontNPV*basisPoint/(*arguments_.upfront);
    } else {
        results_.upfrontBPS = Null<Rate>();
    }
}
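// Note (added for clarity, not part of the original engine): the do/while
// loop above is a left-point discretisation of the protection leg over each
// accrual period, with grid dates d_0 < d_1 < ... spaced by integrationStep_:
//
//     defaultLegNPV  ~=  sum_k  claim(d_k) * B_k * [ P(d_k) - P(d_{k-1}) ]
//
// where P is the cumulative default probability, B_k the discount factor
// (taken at the coupon payment date unless paysAtDefaultTime is true), and
// claim(.) the loss-given-default amount; the same dP weights the accrual
// contribution when settlesAccrual is set.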
virtual Date maxDate() const {
    //FIXME approx
    return optionDateFromTenor(
        Period((int)ceil(interpolation_.xMax()), Years));
}
MakeOIS::operator boost::shared_ptr<OvernightIndexedSwap>() const {

    const Calendar& calendar = overnightIndex_->fixingCalendar();

    Date startDate;
    if (effectiveDate_ != Date())
        startDate = effectiveDate_;
    else {
        Date referenceDate = Settings::instance().evaluationDate();
        Date spotDate = calendar.advance(referenceDate, fixingDays_*Days);
        startDate = spotDate+forwardStart_;
    }

    Date endDate;
    if (terminationDate_ != Date()) {
        endDate = terminationDate_;
    } else {
        if (endOfMonth_) {
            endDate = calendar.advance(startDate, swapTenor_,
                                       ModifiedFollowing, endOfMonth_);
        } else {
            endDate = startDate+swapTenor_;
        }
    }

    Schedule schedule(startDate, endDate,
                      Period(paymentFrequency_),
                      calendar,
                      ModifiedFollowing,
                      ModifiedFollowing,
                      rule_,
                      endOfMonth_);

    Rate usedFixedRate = fixedRate_;
    if (fixedRate_ == Null<Rate>()) {
        QL_REQUIRE(!overnightIndex_->forwardingTermStructure().empty(),
                   "null term structure set to this instance of " <<
                   overnightIndex_->name());
        OvernightIndexedSwap temp(type_, nominal_,
                                  schedule,
                                  0.0, // fixed rate
                                  fixedDayCount_,
                                  overnightIndex_, overnightSpread_);
        // ATM on the forecasting curve
        bool includeSettlementDateFlows = false;
        temp.setPricingEngine(boost::shared_ptr<PricingEngine>(
            new DiscountingSwapEngine(
                overnightIndex_->forwardingTermStructure(),
                includeSettlementDateFlows)));
        usedFixedRate = temp.fairRate();
    }

    boost::shared_ptr<OvernightIndexedSwap> ois(
        new OvernightIndexedSwap(type_, nominal_,
                                 schedule,
                                 usedFixedRate, fixedDayCount_,
                                 overnightIndex_, overnightSpread_));
    ois->setPricingEngine(engine_);
    return ois;
}
FixedRateLeg::operator Leg() const {
    QL_REQUIRE(!couponRates_.empty(), "no coupon rates given");
    QL_REQUIRE(!notionals_.empty(), "no notional given");

    Leg leg;
    leg.reserve(schedule_.size()-1);

    // first period might be short or long
    Date start = schedule_.date(0), end = schedule_.date(1);
    Date paymentDate = paymentCalendar_.advance(end, paymentLag_, Days,
                                                paymentAdjustment_);
    Date exCouponDate;
    InterestRate rate = couponRates_[0];
    Real nominal = notionals_[0];

    if (exCouponPeriod_ != Period()) {
        exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                 -exCouponPeriod_,
                                                 exCouponAdjustment_,
                                                 exCouponEndOfMonth_);
    }

    Date ref = schedule_.hasTenor() &&
               schedule_.hasIsRegular() && !schedule_.isRegular(1) ?
        schedule_.calendar().advance(end,
                                     -schedule_.tenor(),
                                     schedule_.businessDayConvention(),
                                     schedule_.endOfMonth())
        : start;
    InterestRate r(rate.rate(),
                   firstPeriodDC_.empty() ? rate.dayCounter() : firstPeriodDC_,
                   rate.compounding(), rate.frequency());
    leg.push_back(ext::shared_ptr<CashFlow>(new
        FixedRateCoupon(paymentDate, nominal, r,
                        start, end, ref, end, exCouponDate)));

    // regular periods
    for (Size i=2; i<schedule_.size()-1; ++i) {
        start = end; end = schedule_.date(i);
        Date paymentDate = paymentCalendar_.advance(end, paymentLag_, Days,
                                                    paymentAdjustment_);
        if (exCouponPeriod_ != Period()) {
            exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                     -exCouponPeriod_,
                                                     exCouponAdjustment_,
                                                     exCouponEndOfMonth_);
        }
        if ((i-1) < couponRates_.size())
            rate = couponRates_[i-1];
        else
            rate = couponRates_.back();
        if ((i-1) < notionals_.size())
            nominal = notionals_[i-1];
        else
            nominal = notionals_.back();
        leg.push_back(ext::shared_ptr<CashFlow>(new
            FixedRateCoupon(paymentDate, nominal, rate,
                            start, end, start, end, exCouponDate)));
    }

    if (schedule_.size() > 2) {
        // last period might be short or long
        Size N = schedule_.size();
        start = end; end = schedule_.date(N-1);
        Date paymentDate = paymentCalendar_.advance(end, paymentLag_, Days,
                                                    paymentAdjustment_);
        if (exCouponPeriod_ != Period()) {
            exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                     -exCouponPeriod_,
                                                     exCouponAdjustment_,
                                                     exCouponEndOfMonth_);
        }
        if ((N-2) < couponRates_.size())
            rate = couponRates_[N-2];
        else
            rate = couponRates_.back();
        if ((N-2) < notionals_.size())
            nominal = notionals_[N-2];
        else
            nominal = notionals_.back();
        InterestRate r(rate.rate(),
                       lastPeriodDC_.empty() ? rate.dayCounter() : lastPeriodDC_,
                       rate.compounding(), rate.frequency());
        if ((schedule_.hasIsRegular() && schedule_.isRegular(N-1)) ||
            !schedule_.hasTenor()) {
            leg.push_back(ext::shared_ptr<CashFlow>(new
                FixedRateCoupon(paymentDate, nominal, r,
                                start, end, start, end, exCouponDate)));
        } else {
            Date ref = schedule_.calendar().advance(
                           start, schedule_.tenor(),
                           schedule_.businessDayConvention(),
                           schedule_.endOfMonth());
            leg.push_back(ext::shared_ptr<CashFlow>(new
                FixedRateCoupon(paymentDate, nominal, r,
                                start, end, start, ref, exCouponDate)));
        }
    }
    return leg;
}
KRWiborFX6M(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : KRWiborFX(Period(6, Months), h) {}
MakeSchedule& MakeSchedule::withFrequency(Frequency frequency) { tenor_ = Period(frequency); return *this; }
Euribor10M(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : Euribor(Period(10, Months), h) {}
template <class C, class URNG>
void RandomLossLM<C, URNG>::nextSample(const std::vector<Real>& values) const {
    const ext::shared_ptr<Pool>& pool = this->basket_->pool();
    this->simsBuffer_.push_back(std::vector<defaultSimEvent>());

    // half the model is defaults, the other half are RRs...
    for (Size iName=0; iName<copula_->size()/2; iName++) {
        // ...but samples must be full
        /* This is really a trick, we are passing a longer than expected set
        of values in the sample but the last idiosyncratic values
        corresponding to the RR are not used. They are used below only if we
        are in default. This works due to the way the SpotLossLM is split in
        two almost disjoint latent models and that theres no check on the
        vector size in the LM base class.
        */
        Real latentVarSample = copula_->latentVarValue(values, iName);
        Probability simDefaultProb =
            copula_->cumulativeY(latentVarSample, iName);
        // If the default simulated lies before the max date:
        if (horizonDefaultPs_[iName] >= simDefaultProb) {
            const Handle<DefaultProbabilityTermStructure>& dfts =
                pool->get(pool->names()[iName]).  // use 'live' names
                    defaultProbability(this->basket_->defaultKeys()[iName]);
            // compute and store default time with respect to the
            // curve ref date:
            Size dateSTride =
                static_cast<Size>(Brent().solve(  // casted from Real:
                    detail::Root(dfts, simDefaultProb), accuracy_, 0., 1.));
            /*
            // value if one approximates to a flat HR;
            // faster (>x2) but it introduces an error:..
            // \todo: see how to include this 'polymorphically'. While
            // not the case in pricing in risk metrics/real
            // probabilities the curves are often flat
            static_cast<Size>(ceil(maxHorizon_ * std::log(1.-simDefaultProb)
                /std::log(1.-data_.horizonDefaultPs_[iName])));
            */
            // Determine the realized recovery rate:
            /* For this; 'conditionalRecovery' needs to compute the pdef on
            the realized def event date from the simulation. Yet, this might
            have fallen between todays date and the default TS reference
            date (usually a two day gap). To avoid requesting a negative time
            probability the date is moved to the TS date. Unless the gap is
            ridiculous this has no practical effect for the RR value */
            Date today = Settings::instance().evaluationDate();
            Date eventDate =
                today + Period(static_cast<Integer>(dateSTride), Days);
            if (eventDate < dfts->referenceDate())
                eventDate = dfts->referenceDate();
            Real latentRRVarSample =
                copula_->latentRRVarValue(values, iName);
            Real recovery =
                copula_->conditionalRecovery(latentRRVarSample,
                                             iName, eventDate);
            this->simsBuffer_.back().push_back(
                defaultSimEvent(iName, dateSTride, recovery)); //emplace_back
        }
        /* Used to remove sims with no events. Uses less memory, faster
        post-statistics. But only if all names in the portfolio have low
        default probability, otherwise is more expensive and sim access has
        to be modified. However low probability is also an indicator that
        variance reduction is needed. */
        //if(simsBuffer.back().empty()) {
        //    emptySims_++;  // Size; intilzd to zero
        //    simsBuffer.pop_back();
        //}
    }
}
Euribor365_2W(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : Euribor365(Period(2, Weeks), h) {}
Period periodFromFrequency(Frequency f) { return Period(f); }
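// Hedged sketch (not part of the original source): the Frequency-based Period
// constructor used above maps the enum onto its natural tenor, e.g.
//     periodFromFrequency(Annual)      // -> Period(1, Years)
//     periodFromFrequency(Semiannual)  // -> Period(6, Months)
//     periodFromFrequency(Quarterly)   // -> Period(3, Months)
//     periodFromFrequency(Monthly)     // -> Period(1, Months)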
Euribor365_11M(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : Euribor365(Period(11, Months), h) {}
MakeVanillaSwap::operator ext::shared_ptr<VanillaSwap>() const {

    Date startDate;
    if (effectiveDate_ != Date())
        startDate = effectiveDate_;
    else {
        Date refDate = Settings::instance().evaluationDate();
        // if the evaluation date is not a business day
        // then move to the next business day
        refDate = floatCalendar_.adjust(refDate);
        Date spotDate = floatCalendar_.advance(refDate, settlementDays_*Days);
        startDate = spotDate+forwardStart_;
        if (forwardStart_.length()<0)
            startDate = floatCalendar_.adjust(startDate, Preceding);
        else
            startDate = floatCalendar_.adjust(startDate, Following);
    }

    Date endDate = terminationDate_;
    if (endDate == Date()) {
        if (floatEndOfMonth_)
            endDate = floatCalendar_.advance(startDate, swapTenor_,
                                             ModifiedFollowing,
                                             floatEndOfMonth_);
        else
            endDate = startDate + swapTenor_;
    }

    const Currency& curr = iborIndex_->currency();
    Period fixedTenor;
    if (fixedTenor_ != Period())
        fixedTenor = fixedTenor_;
    else {
        if ((curr == EURCurrency()) ||
            (curr == USDCurrency()) ||
            (curr == CHFCurrency()) ||
            (curr == SEKCurrency()) ||
            (curr == GBPCurrency() && swapTenor_ <= 1 * Years))
            fixedTenor = Period(1, Years);
        else if ((curr == GBPCurrency() && swapTenor_ > 1 * Years) ||
                 (curr == JPYCurrency()) ||
                 (curr == AUDCurrency() && swapTenor_ >= 4 * Years))
            fixedTenor = Period(6, Months);
        else if ((curr == HKDCurrency() ||
                 (curr == AUDCurrency() && swapTenor_ < 4 * Years)))
            fixedTenor = Period(3, Months);
        else
            QL_FAIL("unknown fixed leg default tenor for " << curr);
    }

    Schedule fixedSchedule(startDate, endDate,
                           fixedTenor, fixedCalendar_,
                           fixedConvention_,
                           fixedTerminationDateConvention_,
                           fixedRule_, fixedEndOfMonth_,
                           fixedFirstDate_, fixedNextToLastDate_);
    Schedule floatSchedule(startDate, endDate,
                           floatTenor_, floatCalendar_,
                           floatConvention_,
                           floatTerminationDateConvention_,
                           floatRule_, floatEndOfMonth_,
                           floatFirstDate_, floatNextToLastDate_);

    DayCounter fixedDayCount;
    if (fixedDayCount_ != DayCounter())
        fixedDayCount = fixedDayCount_;
    else {
        if (curr == USDCurrency())
            fixedDayCount = Actual360();
        else if (curr == EURCurrency() || curr == CHFCurrency() ||
                 curr == SEKCurrency())
            fixedDayCount = Thirty360(Thirty360::BondBasis);
        else if (curr == GBPCurrency() || curr == JPYCurrency() ||
                 curr == AUDCurrency() || curr == HKDCurrency())
            fixedDayCount = Actual365Fixed();
        else
            QL_FAIL("unknown fixed leg day counter for " << curr);
    }

    Rate usedFixedRate = fixedRate_;
    if (fixedRate_ == Null<Rate>()) {
        VanillaSwap temp(type_, nominal_,
                         fixedSchedule,
                         0.0, // fixed rate
                         fixedDayCount,
                         floatSchedule, iborIndex_,
                         floatSpread_, floatDayCount_);
        if (engine_ == 0) {
            Handle<YieldTermStructure> disc =
                iborIndex_->forwardingTermStructure();
            QL_REQUIRE(!disc.empty(),
                       "null term structure set to this instance of " <<
                       iborIndex_->name());
            bool includeSettlementDateFlows = false;
            ext::shared_ptr<PricingEngine> engine(new
                DiscountingSwapEngine(disc, includeSettlementDateFlows));
            temp.setPricingEngine(engine);
        } else
            temp.setPricingEngine(engine_);

        usedFixedRate = temp.fairRate();
    }

    ext::shared_ptr<VanillaSwap> swap(new
        VanillaSwap(type_, nominal_,
                    fixedSchedule,
                    usedFixedRate, fixedDayCount,
                    floatSchedule, iborIndex_,
                    floatSpread_, floatDayCount_));

    if (engine_ == 0) {
        Handle<YieldTermStructure> disc =
            iborIndex_->forwardingTermStructure();
        bool includeSettlementDateFlows = false;
        ext::shared_ptr<PricingEngine> engine(new
            DiscountingSwapEngine(disc, includeSettlementDateFlows));
        swap->setPricingEngine(engine);
    } else
        swap->setPricingEngine(engine_);

    return swap;
}
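// Hedged usage sketch, not part of the original source: shows how the
// conversion operator above is typically consumed. The function name and the
// 'forecastingCurve' handle are assumptions for illustration only.
void exampleMakeVanillaSwapUsage(const Handle<YieldTermStructure>& forecastingCurve) {
    ext::shared_ptr<IborIndex> euribor6m(new Euribor6M(forecastingCurve));
    // tenor, index and fixed rate; everything left unspecified falls back to
    // the currency defaults chosen inside the operator above
    ext::shared_ptr<VanillaSwap> swap =
        MakeVanillaSwap(5*Years, euribor6m, 0.021)
            .withEffectiveDate(Date(15, May, 2017))
            .withNominal(1000000.0);
    std::cout << "5Y swap NPV: " << swap->NPV() << std::endl;
}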
Euribor365_1Y(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : Euribor365(Period(1, Years), h) {}
FloatingRateBond::FloatingRateBond( Natural settlementDays, Real faceAmount, const Date& startDate, const Date& maturityDate, Frequency couponFrequency, const Calendar& calendar, const ext::shared_ptr<IborIndex>& iborIndex, const DayCounter& accrualDayCounter, BusinessDayConvention accrualConvention, BusinessDayConvention paymentConvention, Natural fixingDays, const std::vector<Real>& gearings, const std::vector<Spread>& spreads, const std::vector<Rate>& caps, const std::vector<Rate>& floors, bool inArrears, Real redemption, const Date& issueDate, const Date& stubDate, DateGeneration::Rule rule, bool endOfMonth) : Bond(settlementDays, calendar, issueDate) { maturityDate_ = maturityDate; Date firstDate, nextToLastDate; switch (rule) { case DateGeneration::Backward: firstDate = Date(); nextToLastDate = stubDate; break; case DateGeneration::Forward: firstDate = stubDate; nextToLastDate = Date(); break; case DateGeneration::Zero: case DateGeneration::ThirdWednesday: case DateGeneration::Twentieth: case DateGeneration::TwentiethIMM: QL_FAIL("stub date (" << stubDate << ") not allowed with " << rule << " DateGeneration::Rule"); default: QL_FAIL("unknown DateGeneration::Rule (" << Integer(rule) << ")"); } Schedule schedule(startDate, maturityDate_, Period(couponFrequency), calendar_, accrualConvention, accrualConvention, rule, endOfMonth, firstDate, nextToLastDate); cashflows_ = IborLeg(schedule, iborIndex) .withNotionals(faceAmount) .withPaymentDayCounter(accrualDayCounter) .withPaymentAdjustment(paymentConvention) .withFixingDays(fixingDays) .withGearings(gearings) .withSpreads(spreads) .withCaps(caps) .withFloors(floors) .inArrears(inArrears); addRedemptionsToCashflows(std::vector<Real>(1, redemption)); QL_ENSURE(!cashflows().empty(), "bond with no cashflows!"); QL_ENSURE(redemptions_.size() == 1, "multiple redemptions created"); registerWith(iborIndex); }
Euribor3W(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : Euribor(Period(3, Weeks), h) {}
MakeOIS::operator ext::shared_ptr<OvernightIndexedSwap>() const {

    Date startDate;
    if (effectiveDate_ != Date())
        startDate = effectiveDate_;
    else {
        Date refDate = Settings::instance().evaluationDate();
        // if the evaluation date is not a business day
        // then move to the next business day
        refDate = calendar_.adjust(refDate);
        Date spotDate = calendar_.advance(refDate, settlementDays_*Days);
        startDate = spotDate+forwardStart_;
        if (forwardStart_.length()<0)
            startDate = calendar_.adjust(startDate, Preceding);
        else
            startDate = calendar_.adjust(startDate, Following);
    }

    // OIS end of month default
    bool usedEndOfMonth =
        isDefaultEOM_ ? calendar_.isEndOfMonth(startDate) : endOfMonth_;

    Date endDate = terminationDate_;
    if (endDate == Date()) {
        if (usedEndOfMonth)
            endDate = calendar_.advance(startDate,
                                        swapTenor_,
                                        ModifiedFollowing,
                                        usedEndOfMonth);
        else
            endDate = startDate + swapTenor_;
    }

    Schedule schedule(startDate, endDate,
                      Period(paymentFrequency_),
                      calendar_,
                      ModifiedFollowing,
                      ModifiedFollowing,
                      rule_,
                      usedEndOfMonth);

    Rate usedFixedRate = fixedRate_;
    if (fixedRate_ == Null<Rate>()) {
        OvernightIndexedSwap temp(type_, nominal_,
                                  schedule,
                                  0.0, // fixed rate
                                  fixedDayCount_,
                                  overnightIndex_, overnightSpread_,
                                  paymentLag_, paymentAdjustment_,
                                  paymentCalendar_, telescopicValueDates_);
        if (engine_ == 0) {
            Handle<YieldTermStructure> disc =
                overnightIndex_->forwardingTermStructure();
            QL_REQUIRE(!disc.empty(),
                       "null term structure set to this instance of " <<
                       overnightIndex_->name());
            bool includeSettlementDateFlows = false;
            ext::shared_ptr<PricingEngine> engine(new
                DiscountingSwapEngine(disc, includeSettlementDateFlows));
            temp.setPricingEngine(engine);
        } else
            temp.setPricingEngine(engine_);

        usedFixedRate = temp.fairRate();
    }

    ext::shared_ptr<OvernightIndexedSwap> ois(new
        OvernightIndexedSwap(type_, nominal_,
                             schedule,
                             usedFixedRate, fixedDayCount_,
                             overnightIndex_, overnightSpread_,
                             paymentLag_, paymentAdjustment_,
                             paymentCalendar_, telescopicValueDates_));

    if (engine_ == 0) {
        Handle<YieldTermStructure> disc =
            overnightIndex_->forwardingTermStructure();
        bool includeSettlementDateFlows = false;
        ext::shared_ptr<PricingEngine> engine(new
            DiscountingSwapEngine(disc, includeSettlementDateFlows));
        ois->setPricingEngine(engine);
    } else
        ois->setPricingEngine(engine_);

    return ois;
}
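// Hedged usage sketch, not from the original file: building a 1Y OIS through
// the conversion operator above. The function name and the 'discountCurve'
// handle are assumptions; when the fixed rate is omitted, the operator prices
// the swap at its fair (ATM) rate on the index's forwarding curve.
void exampleMakeOISUsage(const Handle<YieldTermStructure>& discountCurve) {
    ext::shared_ptr<OvernightIndex> eonia(new Eonia(discountCurve));
    ext::shared_ptr<OvernightIndexedSwap> ois =
        MakeOIS(1*Years, eonia)           // fixed rate omitted -> fair rate
            .withNominal(1000000.0)
            .withSettlementDays(2);
    std::cout << "ATM OIS fixed rate: " << ois->fixedRate() << std::endl;
}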
void test1Y10() {

    std::cout << "Testing calibration of a Libor forward model with 1Y10 settings..."
              << std::endl;

    /* basic settings */
    SavedSettings backup;
    const Size size_ = 44;          // 44 trimesters
    const Real tolerance_ = 8e-3;   // tolerance

    boost::shared_ptr<IborIndex> libor = curveCreation();

    // swaption data
    std::vector<swaptionData> swaptions = std::vector<swaptionData> {
        //{ 50.200, Period(1, Months), Period(1, Years) },
        { 54.475, Period(3, Months), Period(1, Years) },
        { 63.350, Period(6, Months), Period(1, Years) },
        //{ 68.650, Period(1, Years), Period(1, Years) },
        //{ 49.850, Period(2, Years), Period(1, Years) },
        //{ 38.500, Period(3, Years), Period(1, Years) },
        //{ 31.900, Period(4, Years), Period(1, Years) },
        //{ 28.500, Period(5, Years), Period(1, Years) },
        { 26.500, Period(6, Years), Period(1, Years) },
        { 24.625, Period(7, Years), Period(1, Years) },
        { 23.500, Period(8, Years), Period(1, Years) },
        { 22.550, Period(9, Years), Period(1, Years) },
        { 21.150, Period(10, Years), Period(1, Years) },
        //{ 60.950, Period(1, Months), Period(2, Years) },
        { 55.700, Period(3, Months), Period(2, Years) },
        { 58.100, Period(6, Months), Period(2, Years) },
        { 56.550, Period(1, Years), Period(2, Years) },
        //{ 42.600, Period(2, Years), Period(2, Years) },
        //{ 34.300, Period(3, Years), Period(2, Years) },
        //{ 29.400, Period(4, Years), Period(2, Years) },
        { 26.900, Period(5, Years), Period(2, Years) },
        { 25.050, Period(6, Years), Period(2, Years) },
        { 23.800, Period(7, Years), Period(2, Years) },
        { 22.700, Period(8, Years), Period(2, Years) },
        { 21.600, Period(9, Years), Period(2, Years) },
        { 51.050, Period(1, Months), Period(3, Years) },
        { 48.300, Period(3, Months), Period(3, Years) },
        { 48.900, Period(6, Months), Period(3, Years) },
        { 47.000, Period(1, Years), Period(3, Years) },
        //{ 37.000, Period(2, Years), Period(3, Years) },
        //{ 31.275, Period(3, Years), Period(3, Years) },
        { 27.650, Period(4, Years), Period(3, Years) },
        { 25.650, Period(5, Years), Period(3, Years) },
        { 24.200, Period(6, Years), Period(3, Years) },
        { 23.050, Period(7, Years), Period(3, Years) },
        { 22.050, Period(8, Years), Period(3, Years) },
        { 40.800, Period(1, Months), Period(4, Years) },
        { 40.225, Period(3, Months), Period(4, Years) },
        { 40.200, Period(6, Months), Period(4, Years) },
        { 39.300, Period(1, Years), Period(4, Years) },
        //{ 32.800, Period(2, Years), Period(4, Years) },
        { 28.925, Period(3, Years), Period(4, Years) },
        { 26.150, Period(4, Years), Period(4, Years) },
        { 24.650, Period(5, Years), Period(4, Years) },
        { 23.500, Period(6, Years), Period(4, Years) },
        { 22.350, Period(7, Years), Period(4, Years) },
        { 36.025, Period(1, Months), Period(5, Years) },
        { 35.600, Period(3, Months), Period(5, Years) },
        { 35.425, Period(6, Months), Period(5, Years) },
        { 34.250, Period(1, Years), Period(5, Years) },
        { 29.850, Period(2, Years), Period(5, Years) },
        { 26.900, Period(3, Years), Period(5, Years) },
        { 24.925, Period(4, Years), Period(5, Years) },
        { 23.700, Period(5, Years), Period(5, Years) },
        { 22.750, Period(6, Years), Period(5, Years) },
        { 30.800, Period(1, Months), Period(6, Years) },
        { 30.700, Period(3, Months), Period(6, Years) },
        //{ 31.200, Period(6, Months), Period(6, Years) },
        { 30.750, Period(1, Years), Period(6, Years) },
        { 27.850, Period(2, Years), Period(6, Years) },
        { 25.775, Period(3, Years), Period(6, Years) },
        { 24.125, Period(4, Years), Period(6, Years) },
        { 23.100, Period(5, Years), Period(6, Years) },
        { 27.600, Period(1, Months), Period(7, Years) },
        { 27.750, Period(3, Months), Period(7, Years) },
        { 28.350, Period(6, Months), Period(7, Years) },
        { 28.200, Period(1, Years), Period(7, Years) },
        { 26.400, Period(2, Years), Period(7, Years) },
        { 24.750, Period(3, Years), Period(7, Years) },
        { 23.400, Period(4, Years), Period(7, Years) },
        { 25.250, Period(1, Months), Period(8, Years) },
        { 25.450, Period(3, Months), Period(8, Years) },
        { 26.000, Period(6, Months), Period(8, Years) },
        { 26.250, Period(1, Years), Period(8, Years) },
        { 25.225, Period(2, Years), Period(8, Years) },
        { 23.875, Period(3, Years), Period(8, Years) },
        { 23.575, Period(1, Months), Period(9, Years) },
        { 23.575, Period(3, Months), Period(9, Years) },
        { 24.600, Period(6, Months), Period(9, Years) },
        { 24.800, Period(1, Years), Period(9, Years) },
        { 24.150, Period(2, Years), Period(9, Years) },
        { 22.275, Period(1, Months), Period(10, Years) },
        { 22.125, Period(3, Months), Period(10, Years) },
        { 23.300, Period(6, Months), Period(10, Years) },
        { 23.600, Period(1, Years), Period(10, Years) }
    };

    Handle<YieldTermStructure> termStructure =
        libor->forwardingTermStructure();

    // set up the process
    boost::shared_ptr<LiborForwardModelProcess> process(
        new LiborForwardModelProcess(size_, libor));
    std::vector<Time> fixingT = process->fixingTimes();

    // set-up the model
    boost::shared_ptr<LmVolatilityModel> volaModel(
        new LmExtLinearExponentialVolModel(process->fixingTimes(),
                                           0.5, 0.6, 0.1, 0.1));
    boost::shared_ptr<LmCorrelationModel> corrModel(
        new LmLinearExponentialCorrelationModel(size_, 0.5, 0.8));
    boost::shared_ptr<LiborForwardModel> model(
        new LiborForwardModel(process, volaModel, corrModel));

    Size swapVolIndex = 0;
    DayCounter dayCounter = libor->forwardingTermStructure()->dayCounter();

    // set-up calibration helper
    std::vector<boost::shared_ptr<CalibrationHelper> > calibrationHelper;
    Size i;
    for (i = 0; i < swaptions.size(); i++) {
        Handle<Quote> swaptionVol(
            boost::shared_ptr<Quote>(
                new SimpleQuote(swaptions[i].volatility_ / 100)));

        boost::shared_ptr<CalibrationHelper> swaptionHelper(
            new SwaptionHelper(swaptions[i].lenght_, swaptions[i].maturity_,
                               swaptionVol, libor, libor->tenor(),
                               dayCounter, libor->dayCounter(),
                               termStructure,
                               CalibrationHelper::ImpliedVolError));
        swaptionHelper->setPricingEngine(
            boost::shared_ptr<PricingEngine>(
                new LfmSwaptionEngine(model, termStructure)));
        calibrationHelper.push_back(swaptionHelper);
    }

#ifdef _DEBUG
    LevenbergMarquardt om(1e-5, 1e-5, 1e-5);
    model->calibrate(calibrationHelper, om,
                     EndCriteria(100, 20, 1e-5, 1e-5, 1e-6));
#else
    boost::shared_ptr<OptimizationMethod> om(
        new LevenbergMarquardt(1e-6, 1e-6, 1e-6));
    //boost::shared_ptr<OptimizationMethod> om(new SteepestDescent);
    model->calibrate(calibrationHelper, *om,
                     EndCriteria(2000, 100, 1e-6, 1e-6, 1e-6));
#endif

    // measure the calibration error
    Real calculated = 0.0;
    for (i = 0; i < calibrationHelper.size(); ++i) {
        Real diff = calibrationHelper[i]->calibrationError();
        calculated += diff * diff;
    }

    // create diagnostic file
    {
        std::string fileStr("C:/Temp/liborModel_1Y10_");   // build file path
        fileStr.append(boost::posix_time::to_iso_string(
            boost::posix_time::second_clock::local_time()));
        fileStr.append(".csv");

        utilities::csvBuilder file(fileStr);                // csv builder

        Array times(size_, 0.0);
        Array rates(size_, 0.0);

        // saves yield curve data
        for (int i = 0; i < size_; i++) {
            times[i] = fixingT[i];                          // the fixing times from model
            rates[i] = libor->forwardingTermStructure()->zeroRate(
                times[i], Continuous);
        }

        file.add("times", 1, 1);
        file.add("rates", 1, 2);                            // adds the yield curve data
        file.add(times, 2, 1);
        file.add(rates, 2, 2);

        file.add(std::string("calibration result:"), 1, 4); // calibration result
        file.add(model->endCriteria(), 2, 4);

        file.add(std::string("calculated diff:"), 4, 4);    // calibration result
        file.add(std::sqrt(calculated), 5, 4);

        file.add("correlation matrix at time zero", 1, 6);  // correlation
        file.add(corrModel->correlation(0), 2, 6);
    }
}
KorCD_9M(const Handle<YieldTermStructure>& h = Handle<YieldTermStructure>()) : KorCD(Period(9, Months), h) {}
FixedRateLeg::operator Leg() const {
    QL_REQUIRE(!couponRates_.empty(), "no coupon rates given");
    QL_REQUIRE(!notionals_.empty(), "no notional given");

    Leg leg;
    leg.reserve(schedule_.size()-1);

    Calendar schCalendar = schedule_.calendar();

    // first period might be short or long
    Date start = schedule_.date(0), end = schedule_.date(1);
    Date paymentDate = calendar_.adjust(end, paymentAdjustment_);
    Date exCouponDate;
    InterestRate rate = couponRates_[0];
    Real nominal = notionals_[0];

    if (exCouponPeriod_ != Period()) {
        exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                 -exCouponPeriod_,
                                                 exCouponAdjustment_,
                                                 exCouponEndOfMonth_);
    }

    if (schedule_.isRegular(1)) {
        QL_REQUIRE(firstPeriodDC_.empty() ||
                   firstPeriodDC_ == rate.dayCounter(),
                   "regular first coupon "
                   "does not allow a first-period day count");
        shared_ptr<CashFlow> temp(new
            FixedRateCoupon(paymentDate, nominal, rate,
                            start, end, start, end, exCouponDate));
        leg.push_back(temp);
    } else {
        Date ref = end - schedule_.tenor();
        ref = schCalendar.adjust(ref, schedule_.businessDayConvention());
        InterestRate r(rate.rate(),
                       firstPeriodDC_.empty() ? rate.dayCounter() : firstPeriodDC_,
                       rate.compounding(), rate.frequency());
        leg.push_back(shared_ptr<CashFlow>(new
            FixedRateCoupon(paymentDate, nominal, r,
                            start, end, ref, end, exCouponDate)));
    }

    // regular periods
    for (Size i=2; i<schedule_.size()-1; ++i) {
        start = end; end = schedule_.date(i);
        paymentDate = calendar_.adjust(end, paymentAdjustment_);
        if (exCouponPeriod_ != Period()) {
            exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                     -exCouponPeriod_,
                                                     exCouponAdjustment_,
                                                     exCouponEndOfMonth_);
        }
        if ((i-1) < couponRates_.size())
            rate = couponRates_[i-1];
        else
            rate = couponRates_.back();
        if ((i-1) < notionals_.size())
            nominal = notionals_[i-1];
        else
            nominal = notionals_.back();
        leg.push_back(shared_ptr<CashFlow>(new
            FixedRateCoupon(paymentDate, nominal, rate,
                            start, end, start, end, exCouponDate)));
    }

    if (schedule_.size() > 2) {
        // last period might be short or long
        Size N = schedule_.size();
        start = end; end = schedule_.date(N-1);
        paymentDate = calendar_.adjust(end, paymentAdjustment_);
        if (exCouponPeriod_ != Period()) {
            exCouponDate = exCouponCalendar_.advance(paymentDate,
                                                     -exCouponPeriod_,
                                                     exCouponAdjustment_,
                                                     exCouponEndOfMonth_);
        }
        if ((N-2) < couponRates_.size())
            rate = couponRates_[N-2];
        else
            rate = couponRates_.back();
        if ((N-2) < notionals_.size())
            nominal = notionals_[N-2];
        else
            nominal = notionals_.back();
        if (schedule_.isRegular(N-1)) {
            leg.push_back(shared_ptr<CashFlow>(new
                FixedRateCoupon(paymentDate, nominal, rate,
                                start, end, start, end, exCouponDate)));
        } else {
            Date ref = start + schedule_.tenor();
            ref = schCalendar.adjust(ref, schedule_.businessDayConvention());
            leg.push_back(shared_ptr<CashFlow>(new
                FixedRateCoupon(paymentDate, nominal, rate,
                                start, end, start, ref, exCouponDate)));
        }
    }
    return leg;
}
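// Hedged usage sketch, not part of the original source: the Leg conversion
// above is what fires when a FixedRateLeg builder is assigned to a Leg. The
// function name and the concrete dates/rates are illustrative assumptions.
void exampleFixedRateLegUsage() {
    Schedule schedule = MakeSchedule()
        .from(Date(15, May, 2017)).to(Date(15, May, 2020))
        .withFrequency(Semiannual)
        .withCalendar(TARGET())
        .withConvention(ModifiedFollowing);
    Leg coupons = FixedRateLeg(schedule)
        .withNotionals(1000000.0)
        .withCouponRates(0.03, Actual360());
    std::cout << "number of fixed coupons: " << coupons.size() << std::endl;
}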