/* Check that statistics from a x512-scaled histogram match the raw
 * histogram's statistics multiplied by 512. */
static char* test_scaling_equivalence()
{
    load_histograms();

    double expected_mean = hdr_mean(cor_histogram) * 512;
    double scaled_mean = hdr_mean(scaled_cor_histogram);
    mu_assert(
        "Averages should be equivalent",
        compare_values(expected_mean, scaled_mean, 0.000001));

    mu_assert(
        "Total count should be equivalent",
        compare_int64(
            cor_histogram->total_count,
            scaled_cor_histogram->total_count));

    /* Compare the 99th percentiles at their lowest-equivalent bucket
     * boundaries so bucket granularity cannot cause spurious failures. */
    int64_t expected_99th = hdr_value_at_percentile(cor_histogram, 99.0) * 512;
    int64_t scaled_99th = hdr_value_at_percentile(scaled_cor_histogram, 99.0);
    mu_assert(
        "99%'iles should be equivalent",
        compare_int64(
            hdr_lowest_equivalent_value(cor_histogram, expected_99th),
            hdr_lowest_equivalent_value(scaled_cor_histogram, scaled_99th)));

    mu_assert(
        "Max should be equivalent",
        compare_int64(
            hdr_max(cor_histogram) * 512,
            hdr_max(scaled_cor_histogram)));

    return 0;
}
/* NIF: return the mean of the histogram referenced by argv[0], rounded
 * to the context's configured number of significant figures.
 * Returns 0.0 for an empty histogram, badarg for a bad handle, and an
 * error tuple if the resource pointer is unexpectedly NULL. */
ERL_NIF_TERM _hh_mean(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
    hh_ctx_t* ctx = NULL;
    ErlNifResourceType* ctx_type = get_hh_ctx_type(env);

    if (argc != 1 || ctx_type == NULL ||
        !enif_get_resource(env, argv[0], ctx_type, (void **)&ctx))
    {
        return enif_make_badarg(env);
    }

    if (ctx == NULL)
    {
        return make_error(env, "bad_hdr_histogram_nif_impl");
    }

    /* An empty histogram has no meaningful mean; report 0.0. */
    if (ctx->data->total_count == 0)
    {
        return enif_make_double(env, 0.);
    }

    double mean = round_to_significant_figures(
        hdr_mean(ctx->data),
        ctx->significant_figures);
    return enif_make_double(env, mean);
}
/* Mean of the recorded samples: delegate to the HDR histogram when one
 * is attached, otherwise average the raw sample buffer directly.
 * Returns 0.0 when no samples have been recorded. */
long double stats_mean(stats *stats)
{
    if (stats->histogram != NULL) {
        return hdr_mean(stats->histogram);
    }

    if (stats->limit == 0) {
        return 0.0;
    }

    uint64_t total = 0;
    for (uint64_t idx = 0; idx < stats->limit; idx++) {
        total += stats->data[idx];
    }

    /* Divide in long double to keep fractional precision. */
    return total / (long double) stats->limit;
}
/* Merge every per-thread histogram into the shared histogram, then fill
 * the snapshot with aggregate statistics. All work happens under the
 * instance mutex. */
void get_snapshot(Snapshot* snapshot) const {
  ScopedMutex l(&mutex_);
  hdr_histogram* combined = histogram_;

  /* Fold the per-thread histograms into the combined one first so the
   * statistics below reflect all threads. */
  const size_t thread_count = thread_state_->max_threads();
  for (size_t t = 0; t < thread_count; ++t) {
    histograms_[t].add(combined);
  }

  snapshot->min = hdr_min(combined);
  snapshot->max = hdr_max(combined);
  snapshot->mean = static_cast<int64_t>(hdr_mean(combined));
  snapshot->stddev = static_cast<int64_t>(hdr_stddev(combined));
  snapshot->median = hdr_value_at_percentile(combined, 50.0);
  snapshot->percentile_75th = hdr_value_at_percentile(combined, 75.0);
  snapshot->percentile_95th = hdr_value_at_percentile(combined, 95.0);
  snapshot->percentile_98th = hdr_value_at_percentile(combined, 98.0);
  snapshot->percentile_99th = hdr_value_at_percentile(combined, 99.0);
  snapshot->percentile_999th = hdr_value_at_percentile(combined, 99.9);
}
/* Round-trip a histogram through the textual log encoding and verify the
 * decoded copy compares equal, including its mean.
 * Fix: the buffer allocated by hdr_log_encode() was never released,
 * leaking on every successful run. (mu_assert early-returns still leak,
 * which is accepted in this minunit-style harness; the histograms
 * themselves are also left to the process teardown.) */
static char* test_string_encode_decode()
{
    struct hdr_histogram *histogram, *hdr_new = NULL;

    hdr_alloc(INT64_C(3600) * 1000 * 1000, 3, &histogram);

    for (int i = 1; i < 100; i++)
    {
        hdr_record_value(histogram, i * i);
    }

    char *data;  /* malloc'd by hdr_log_encode; owned by this test */
    mu_assert("Failed to encode histogram data",
              hdr_log_encode(histogram, &data) == 0);
    mu_assert("Failed to decode histogram data",
              hdr_log_decode(&hdr_new, data, strlen(data)) == 0);
    mu_assert("Histograms should be the same",
              compare_histogram(histogram, hdr_new));
    mu_assert("Mean different after encode/decode",
              compare_double(hdr_mean(histogram), hdr_mean(hdr_new), 0.001));

    free(data);

    return 0;
}
/* Round-trip a densely populated histogram (every value up to the
 * highest trackable value) through the log encoding and verify equality.
 * Fixes: the loop counter was a plain int compared against the int64_t
 * highest_trackable_value field (harmless at 1000 but a latent width
 * mismatch), and the buffer allocated by hdr_log_encode() was leaked. */
static char* test_string_encode_decode_2()
{
    struct hdr_histogram *histogram, *hdr_new = NULL;

    hdr_alloc(1000, 3, &histogram);

    /* int64_t matches the type of highest_trackable_value. */
    for (int64_t i = 1; i < histogram->highest_trackable_value; i++)
    {
        hdr_record_value(histogram, i);
    }

    char *data;  /* malloc'd by hdr_log_encode; owned by this test */
    mu_assert(
        "Failed to encode histogram data",
        validate_return_code(hdr_log_encode(histogram, &data)));
    mu_assert(
        "Failed to decode histogram data",
        validate_return_code(hdr_log_decode(&hdr_new, data, strlen(data))));
    mu_assert("Histograms should be the same",
              compare_histogram(histogram, hdr_new));
    mu_assert("Mean different after encode/decode",
              compare_double(hdr_mean(histogram), hdr_mean(hdr_new), 0.001));

    free(data);

    return 0;
}
/*
 * Push one reporting interval's metrics to StatsD in a single batch:
 * connection counts, traffic counters, and (when a latency histogram is
 * present, or when reporting the zeroed placeholder) latency gauges.
 * A NULL statsd disables reporting; a NULL sf reports all-zero values.
 * Fix: the function-local SBATCH helper macro was never #undef'd, so it
 * leaked into the remainder of the translation unit.
 */
void report_to_statsd(Statsd *statsd, statsd_feedback *sf) {
    static statsd_feedback empty_feedback;

    if(!statsd) return;
    if(!sf) sf = &empty_feedback;   /* No data: push zeroed-out metrics. */

    statsd_resetBatch(statsd);

/*
 * Queue one metric; if the batch is full, flush it and retry once.
 * Arguments are evaluated a second time on the retry path, so call sites
 * must pass side-effect-free expressions (all of the ones below do).
 */
#define SBATCH(t, str, value)                                       \
    do {                                                            \
        int ret = statsd_addToBatch(statsd, t, str, value, 1);      \
        if(ret == STATSD_BATCH_FULL) {                              \
            statsd_sendBatch(statsd);                               \
            ret = statsd_addToBatch(statsd, t, str, value, 1);      \
        }                                                           \
        assert(ret == STATSD_SUCCESS);                              \
    } while(0)

    SBATCH(STATSD_COUNT, "connections.opened", sf->opened);
    SBATCH(STATSD_GAUGE, "connections.total", sf->conns_in + sf->conns_out);
    SBATCH(STATSD_GAUGE, "connections.total.in", sf->conns_in);
    SBATCH(STATSD_GAUGE, "connections.total.out", sf->conns_out);
    SBATCH(STATSD_GAUGE, "traffic.bitrate", sf->bps_in + sf->bps_out);
    SBATCH(STATSD_GAUGE, "traffic.bitrate.in", sf->bps_in);
    SBATCH(STATSD_GAUGE, "traffic.bitrate.out", sf->bps_out);
    SBATCH(STATSD_COUNT, "traffic.data",
           sf->traffic_delta.bytes_rcvd + sf->traffic_delta.bytes_sent);
    SBATCH(STATSD_COUNT, "traffic.data.rcvd", sf->traffic_delta.bytes_rcvd);
    SBATCH(STATSD_COUNT, "traffic.data.sent", sf->traffic_delta.bytes_sent);
    SBATCH(STATSD_COUNT, "traffic.data.reads", sf->traffic_delta.num_reads);
    SBATCH(STATSD_COUNT, "traffic.data.writes", sf->traffic_delta.num_writes);

    /* Report latency gauges either from the marker histogram or, for the
     * placeholder feedback, as explicit zeroes. */
    if((sf->latency && sf->latency->marker_histogram)
       || sf == &empty_feedback) {
        struct {
            unsigned p50;
            unsigned p95;
            unsigned p99;
            unsigned p99_5;
            unsigned mean;
            unsigned max;
        } lat;
        if(sf->latency && sf->latency->marker_histogram) {
            struct hdr_histogram *hist = sf->latency->marker_histogram;
            /* NOTE(review): histogram values appear to be stored in
             * tenths of the reported unit (hence / 10.0) — confirm
             * against the code that records into marker_histogram. */
            lat.p50 = hdr_value_at_percentile(hist, 50.0) / 10.0;
            lat.p95 = hdr_value_at_percentile(hist, 95.0) / 10.0;
            lat.p99 = hdr_value_at_percentile(hist, 99.0) / 10.0;
            lat.p99_5 = hdr_value_at_percentile(hist, 99.5) / 10.0;
            lat.mean = hdr_mean(hist) / 10.0;
            lat.max = hdr_max(hist) / 10.0;
            /* Sanity bounds: a latency above 10^6 indicates corruption. */
            assert(lat.p95 < 1000000);
            assert(lat.mean < 1000000);
            assert(lat.max < 1000000);
        } else {
            memset(&lat, 0, sizeof(lat));
        }
        SBATCH(STATSD_GAUGE, "latency.mean", lat.mean);
        SBATCH(STATSD_GAUGE, "latency.50", lat.p50);
        SBATCH(STATSD_GAUGE, "latency.95", lat.p95);
        SBATCH(STATSD_GAUGE, "latency.99", lat.p99);
        SBATCH(STATSD_GAUGE, "latency.99.5", lat.p99_5);
        SBATCH(STATSD_GAUGE, "latency.max", lat.max);
    }

/* Keep the batching helper local to this function. */
#undef SBATCH

    statsd_sendBatch(statsd);
}