// Accuracy of an osu!catch score: the fraction of successful hits among all
// hits, clamped into [0, 1]. Returns 0 for a score with no hits at all so we
// never divide by zero.
f32 CatchScore::Accuracy() const
{
    const auto totalHits = TotalHits();
    if (totalHits == 0)
        return 0;

    const f32 ratio = static_cast<f32>(TotalSuccessfulHits()) / totalHits;
    return Clamp(ratio, 0.0f, 1.0f);
}
// Accuracy of an osu!standard score: hits weighted by their judgement value
// (50 / 100 / 300) over the maximum possible (every hit a 300), clamped into
// [0, 1]. Returns 0 for a score with no hits to avoid dividing by zero.
f32 OsuScore::Accuracy() const
{
    const auto totalHits = TotalHits();
    if (totalHits == 0)
        return 0;

    const auto weightedHits = _num50 * 50 + _num100 * 100 + _num300 * 300;
    return Clamp(static_cast<f32>(weightedHits) / (totalHits * 300), 0.0f, 1.0f);
}
// Computes the speed (tapping) portion of the pp value for this score and
// stores it in _speedValue. Structure mirrors computeAimValue(): a base value
// derived from the beatmap's Speed difficulty attribute, then a chain of
// multiplicative adjustments (length, misses, combo, mods, accuracy).
// NOTE(review): the multiplication order is significant for exact float
// results — do not reorder the adjustment steps.
void OsuScore::computeSpeedValue(const Beatmap& beatmap)
{
    // Base value: cubic growth in the Speed star rating above the 0.0675 baseline.
    _speedValue = pow(5.0f * std::max(1.0f, beatmap.DifficultyAttribute(_mods, Beatmap::Speed) / 0.0675f) - 4.0f, 3.0f) / 100000.0f;

    int numTotalHits = TotalHits();

    f32 approachRate = beatmap.DifficultyAttribute(_mods, Beatmap::AR);
    f32 approachRateFactor = 1.0f;
    // Only very high AR (> 10.33) is rewarded for speed; unlike aim there is
    // no low-AR bonus here.
    if (approachRate > 10.33f)
        approachRateFactor += 0.3f * (approachRate - 10.33f);

    _speedValue *= approachRateFactor;

    // Longer maps are worth more
    _speedValue *=
        0.95f + 0.4f * std::min(1.0f, static_cast<f32>(numTotalHits) / 2000.0f) +
        (numTotalHits > 2000 ? log10(static_cast<f32>(numTotalHits) / 2000.0f) * 0.5f : 0.0f);

    // Penalize misses exponentially. This mainly fixes tag4 maps and the likes until a per-hitobject solution is available
    _speedValue *= pow(0.97f, _numMiss);

    // Combo scaling: scores below the map's max combo are scaled down by
    // (combo/maxCombo)^0.8, capped at 1.
    float maxCombo = beatmap.DifficultyAttribute(_mods, Beatmap::MaxCombo);
    if (maxCombo > 0)
        _speedValue *= std::min(static_cast<f32>(pow(_maxCombo, 0.8f) / pow(maxCombo, 0.8f)), 1.0f);

    // We want to give more reward for lower AR when it comes to speed and HD. This nerfs high AR and buffs lower AR.
    if ((_mods & EMods::Hidden) > 0)
        _speedValue *= 1.0f + 0.04f * (12.0f - approachRate);

    // Scale the speed value with accuracy _slightly_
    _speedValue *= 0.02f + Accuracy();
    // It is important to also consider accuracy difficulty when doing that
    _speedValue *= 0.96f + (pow(beatmap.DifficultyAttribute(_mods, Beatmap::OD), 2) / 1600);
}
// Computes the accuracy portion of the pp value for this score and stores it
// in _accValue. The "better" accuracy below differs from plain Accuracy() for
// ScoreV1: only hit circles count toward the timing window, so slider/spinner
// hits are subtracted out of the 300 count first.
void OsuScore::computeAccValue(const Beatmap& beatmap)
{
    // This percentage only considers HitCircles of any value - in this part of the calculation we focus on hitting the timing hit window
    f32 betterAccuracyPercentage;
    s32 numHitObjectsWithAccuracy;
    if (beatmap.ScoreVersion() == Beatmap::EScoreVersion::ScoreV2) {
        // ScoreV2 judges every object on accuracy, so the plain value applies.
        numHitObjectsWithAccuracy = TotalHits();
        betterAccuracyPercentage = Accuracy();
    }
    // Either ScoreV1 or some unknown value. Let's default to previous behavior.
    else {
        numHitObjectsWithAccuracy = beatmap.NumHitCircles();
        if (numHitObjectsWithAccuracy > 0)
            // Assume all non-circle hits (TotalHits - circles) were 300s and
            // remove them, then weight 300/100/50 as 6/2/1 over a perfect 6-per-circle.
            betterAccuracyPercentage = static_cast<f32>((_num300 - (TotalHits() - numHitObjectsWithAccuracy)) * 6 + _num100 * 2 + _num50) / (numHitObjectsWithAccuracy * 6);
        else
            betterAccuracyPercentage = 0;
        // It is possible to reach a negative accuracy with this formula. Cap it at zero - zero points
        if (betterAccuracyPercentage < 0)
            betterAccuracyPercentage = 0;
    }

    // Lots of arbitrary values from testing.
    // Considering to use derivation from perfect accuracy in a probabilistic manner - assume normal distribution
    _accValue = pow(1.52163f, beatmap.DifficultyAttribute(_mods, Beatmap::OD)) * pow(betterAccuracyPercentage, 24) * 2.83f;

    // Bonus for many hitcircles - it's harder to keep good accuracy up for longer
    _accValue *= std::min(1.15f, static_cast<f32>(pow(numHitObjectsWithAccuracy / 1000.0f, 0.3f)));

    // Flat multipliers for reading-impairing mods.
    if ((_mods & EMods::Hidden) > 0)
        _accValue *= 1.08f;
    if ((_mods & EMods::Flashlight) > 0)
        _accValue *= 1.02f;
}
/**
 * Create a summary descriptor for the current document.
 *
 * @param length       Desired summary length.
 * @param min_length   Minimum acceptable summary length.
 * @param max_matches  Maximum number of keyword matches to include.
 * @param surround_len Maximum context to keep around each match.
 * @return nullptr when the document contains no keyword hits at all;
 *         otherwise a heap-allocated SummaryDesc owned by the caller.
 */
SummaryDesc* Matcher::CreateSummaryDesc(size_t length, size_t min_length, int max_matches, int surround_len)
{
    // No point in processing this document if no keywords found at all:
    if (TotalHits() <= 0)
        return nullptr;  // typed null pointer instead of the C macro NULL
    LOG(debug,
        "Matcher: sum.desc (length %lu, min_length %lu, max matches %d, "
        "surround max %d)",
        static_cast<unsigned long>(length), static_cast<unsigned long>(min_length),
        max_matches, surround_len);
    return new SummaryDesc(this, length, min_length, max_matches, surround_len);
}
// Computes the aim (cursor movement) portion of the pp value for this score
// and stores it in _aimValue: a base value from the beatmap's Aim difficulty
// attribute, then a chain of multiplicative adjustments (length, misses,
// combo, AR, mods, accuracy).
// NOTE(review): the multiplication order is significant for exact float
// results — do not reorder the adjustment steps.
void OsuScore::computeAimValue(const Beatmap& beatmap)
{
    f32 rawAim = beatmap.DifficultyAttribute(_mods, Beatmap::Aim);

    // Touch devices dampen the raw aim star rating.
    if ((_mods & EMods::TouchDevice) > 0)
        rawAim = pow(rawAim, 0.8f);

    // Base value: cubic growth in the aim star rating above the 0.0675 baseline.
    _aimValue = pow(5.0f * std::max(1.0f, rawAim / 0.0675f) - 4.0f, 3.0f) / 100000.0f;

    int numTotalHits = TotalHits();

    // Longer maps are worth more
    f32 LengthBonus = 0.95f + 0.4f * std::min(1.0f, static_cast<f32>(numTotalHits) / 2000.0f) +
        (numTotalHits > 2000 ? log10(static_cast<f32>(numTotalHits) / 2000.0f) * 0.5f : 0.0f);

    _aimValue *= LengthBonus;

    // Penalize misses exponentially. This mainly fixes tag4 maps and the likes until a per-hitobject solution is available
    _aimValue *= pow(0.97f, _numMiss);

    // Combo scaling: scores below the map's max combo are scaled down by
    // (combo/maxCombo)^0.8, capped at 1.
    float maxCombo = beatmap.DifficultyAttribute(_mods, Beatmap::MaxCombo);
    if (maxCombo > 0)
        _aimValue *= std::min(static_cast<f32>(pow(_maxCombo, 0.8f) / pow(maxCombo, 0.8f)), 1.0f);

    f32 approachRate = beatmap.DifficultyAttribute(_mods, Beatmap::AR);
    f32 approachRateFactor = 1.0f;
    // Both very high AR (> 10.33) and very low AR (< 8) increase aim difficulty.
    if (approachRate > 10.33f)
        approachRateFactor += 0.3f * (approachRate - 10.33f);
    else if (approachRate < 8.0f) {
        approachRateFactor += 0.01f * (8.0f - approachRate);
    }

    _aimValue *= approachRateFactor;

    // We want to give more reward for lower AR when it comes to aim and HD. This nerfs high AR and buffs lower AR.
    if ((_mods & EMods::Hidden) > 0)
        _aimValue *= 1.0f + 0.04f * (12.0f - approachRate);

    if ((_mods & EMods::Flashlight) > 0)
        // Apply object-based bonus for flashlight: ramps up in three stages
        // at 200, 500, and beyond-500 objects.
        _aimValue *= 1.0f + 0.35f * std::min(1.0f, static_cast<f32>(numTotalHits) / 200.0f) +
            (numTotalHits > 200 ? 0.3f * std::min(1.0f, static_cast<f32>(numTotalHits - 200) / 300.0f) +
            (numTotalHits > 500 ? static_cast<f32>(numTotalHits - 500) / 1200.0f : 0.0f) : 0.0f);

    // Scale the aim value with accuracy _slightly_
    _aimValue *= 0.5f + Accuracy() / 2.0f;
    // It is important to also consider accuracy difficulty when doing that
    _aimValue *= 0.98f + (pow(beatmap.DifficultyAttribute(_mods, Beatmap::OD), 2) / 2500);
}
// This should rather be called ProximityRank() now: long Matcher::GlobalRank() { // Proximity ranking only applies to multi term queries, return a constant // in all other cases: if (QueryTerms() <= 1) return _proximity_noconstraint_offset; match_candidate_set::iterator it = _matches.begin(); #ifdef JUNIPER_1_0_RANK if (it == _matches.end()) return 0; // Rank is computed as the rank of the best match within the document // boosted with the total number of found occurrences of any of the words in the query // normalized by the number of words in the query: return ((*it)->rank() >> 3) + ((TotalHits()/nterms) << 2); #else // Rank is computed as the rank of the 3 best matches within the document // with each subsequent match counting 80% of the previous match. // long rank_val = 0; const int quotient = 5; const int prod = 4; int r_quotient = 1; int r_prod = 1; const int best_matches = 3; // candidate(s) for parametrisation! for (int i = 0; i < best_matches && it != _matches.end(); i++) { rank_val += (((*it)->rank()*r_prod/r_quotient) >> 4); r_quotient *= quotient; r_prod *= prod; ++it; } // Return negative weight of no hits and any of the explicit limits in effect // Eg. NEAR/WITHIN but make exception for PHRASE since that is better //handled by the index in the cases where there are more information at that stage: if (!rank_val && _mo->HasConstraints()) return 0; // shift down to a more suitable range for fsearch. Multiply by configured boost // Add configured offset return (long)((double)(rank_val >> 1) * _proximity_factor) + _proximity_noconstraint_offset; #endif }