//--------------------------------------------------------------------------
Distribution LossDistBinomial::operator()(Size n, Real volume,
                                          Real probability) const {
//--------------------------------------------------------------------------
    n_ = n;
    probability_.clear();
    probability_.resize(n_+1, 0.0);
    Distribution dist (nBuckets_, 0.0, maximum_);
    BinomialDistribution binomial (probability, n);
    // place the binomial weight for i defaults at loss i * volume,
    // skipping losses beyond the end of the grid
    for (Size i = 0; i <= n; i++) {
        if (volume * i <= maximum_) {
            probability_[i] = binomial(i);
            Size bucket = dist.locate(volume * i);
            dist.addDensity (bucket, probability_[i] / dist.dx(bucket));
            dist.addAverage (bucket, volume * i);
        }
    }
    // excessProbability_[k] = P(at least k defaults), by backward summation
    excessProbability_.clear();
    excessProbability_.resize(n_+1, 0.0);
    excessProbability_[n_] = probability_[n_];
    for (int k = n_-1; k >= 0; k--)
        excessProbability_[k] = excessProbability_[k+1] + probability_[k];
    dist.normalize();
    return dist;
}
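// A minimal usage sketch (illustrative only; it assumes a constructor
// LossDistBinomial(Size nBuckets, Real maximum) as implied by the members
// nBuckets_ and maximum_ used above, with hand-picked parameters):
//
//     LossDistBinomial lossDist(100, 5.0);      // 100 buckets on [0, 5]
//     Distribution d = lossDist(50, 0.1, 0.03); // 50 names, notional 0.1,
//                                               // 3% default probability
//
// Each name contributes a loss of `volume`, so the binomial weight
// binomial(i) = C(n,i) p^i (1-p)^(n-i) lands at loss i * volume.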
//--------------------------------------------------------------------------
Distribution LossDistHomogeneous::operator()(Real volume,
                                             const vector<Real>& p) const {
//--------------------------------------------------------------------------
    volume_ = volume;
    n_ = p.size();
    probability_.clear();
    probability_.resize(n_+1, 0.0);
    // build the default-count distribution one name at a time: after
    // name k, probability_[i] = P(exactly i of the first k+1 names default)
    vector<Real> prev;
    probability_[0] = 1.0;
    for (Size k = 0; k < n_; k++) {
        prev = probability_;
        probability_[0] = prev[0] * (1.0 - p[k]);
        for (Size i = 1; i <= k; i++)
            probability_[i] = prev[i-1] * p[k] + prev[i] * (1.0 - p[k]);
        probability_[k+1] = prev[k] * p[k];
    }
    // excessProbability_[k] = P(at least k defaults), by backward summation
    excessProbability_.clear();
    excessProbability_.resize(n_+1, 0.0);
    excessProbability_[n_] = probability_[n_];
    for (int k = n_ - 1; k >= 0; k--)
        excessProbability_[k] = excessProbability_[k+1] + probability_[k];
    // map the default-count distribution onto the loss grid
    Distribution dist (nBuckets_, 0.0, maximum_);
    for (Size i = 0; i <= n_; i++) {
        if (volume * i <= maximum_) {
            Size bucket = dist.locate(volume * i);
            dist.addDensity (bucket, probability_[i] / dist.dx(bucket));
            dist.addAverage (bucket, volume * i);
        }
    }
    dist.normalize();
    return dist;
}
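// Worked example of the recursion above for two names with default
// probabilities p = {0.1, 0.2} (numbers chosen only for illustration):
//
//     start:         probability_ = {1.0}
//     after name 0:  probability_ = {0.9, 0.1}
//     after name 1:  probability_ = {0.9*0.8, 0.1*0.8 + 0.9*0.2, 0.1*0.2}
//                                 = {0.72, 0.26, 0.02}        (sums to 1)
//     backward sums: excessProbability_ = {1.00, 0.28, 0.02}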
//--------------------------------------------------------------------------
Distribution LossDistBucketing::operator()(
                          const vector<Real>& nominals,
                          const vector<Real>& probabilities) const {
//--------------------------------------------------------------------------
    QL_REQUIRE (nominals.size() == probabilities.size(),
                "sizes differ: " << nominals.size()
                << " vs " << probabilities.size());

    vector<Real> p (nBuckets_, 0.0);   // probability mass per bucket
    vector<Real> a (nBuckets_, 0.0);   // conditional mean loss per bucket
    vector<Real> ap (nBuckets_, 0.0);  // scratch vector (unused below)

    p[0] = 1.0;
    a[0] = 0.0;

    // initialize bucket averages to the bucket midpoints
    Real dx = maximum_ / nBuckets_;
    for (Size k = 1; k < nBuckets_; k++)
        a[k] = dx * k + dx/2;

    for (Size i = 0; i < nominals.size(); i++) {
        Real L = nominals[i];
        Real P = probabilities[i];
        // iterate backwards so that mass moved to a higher bucket is not
        // processed again for the same contract
        for (int k = a.size()-1; k >= 0; k--) {
            if (p[k] > 0) {
                int u = locateTargetBucket (a[k] + L, k);
                QL_REQUIRE (u >= 0, "u=" << u << " at i=" << i << " k=" << k);
                QL_REQUIRE (u >= k, "u=" << u << "<k=" << k << " at i=" << i);
                Real dp = p[k] * P;
                if (u == k)
                    a[k] += P * L;
                else {
                    // no update of a[u] and p[u] if u is beyond grid end
                    if (u < int(nBuckets_)) {
                        // a[u] remains unchanged, if dp = 0
                        if (dp > 0.0) {
                            // on Windows, p[u]/dp could cause a NaN for
                            // some very small values of p[k].
                            // Writing the above as (p[u]/p[k])/P prevents
                            // the NaN. What can I say?
                            Real f = 1.0 / (1.0 + (p[u]/p[k]) / P);
                            a[u] = (1.0 - f) * a[u] + f * (a[k] + L);
                        }
                        /* formulation of Hull-White:
                           if (p[u] + dp > 0)
                               a[u] = (p[u] * a[u] + dp * (a[k] + L))
                                      / (p[u] + dp);
                        */
                        p[u] += dp;
                    }
                    p[k] -= dp;
                }
            }
            QL_REQUIRE(a[k] + epsilon_ >= dx * k && a[k] < dx * (k+1),
                       "a out of range at k=" << k << ", contract " << i);
        }
    }

    Distribution dist (nBuckets_, 0.0, maximum_);
    for (Size i = 0; i < nBuckets_; i++) {
        dist.addDensity (i, p[i] / dx);
        dist.addAverage (i, a[i]);
    }

    return dist;
}
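// Note on the weight f used above: with dp = p[k] * P, the Hull-White
// update
//
//     a[u] = (p[u] * a[u] + dp * (a[k] + L)) / (p[u] + dp)
//
// is the convex combination a[u] <- (1 - f) * a[u] + f * (a[k] + L) with
//
//     f = dp / (p[u] + dp) = 1 / (1 + p[u]/dp) = 1 / (1 + (p[u]/p[k]) / P),
//
// which is exactly the numerically safer form computed in the loop.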