/* Estimate the slope dy/dx of the stored (x, y) history via a
 * least-squares linear fit. Expired entries are dropped first.
 * Returns 1 when fewer than two samples remain (not enough data
 * for a fit); clamps negative slopes to 0 when s->monotonic is set. */
static double avg_gradient(pa_smoother *s, pa_usec_t x) {
    unsigned idx, left, count = 0;
    int64_t mean_x = 0, mean_y = 0, num, den;
    double slope;

    drop_old(s, x);

    /* Pass 1: accumulate the mean of all stored measurements,
     * walking the ring buffer from the oldest entry. */
    idx = s->history_idx;
    for (left = s->n_history; left > 0; left--) {
        mean_x += s->history_x[idx];
        mean_y += s->history_y[idx];
        count++;

        idx++;
        while (idx >= HISTORY_MAX)
            idx -= HISTORY_MAX;
    }

    /* Too few measurements, assume gradient of 1 */
    if (count < 2)
        return 1;

    mean_x /= count;
    mean_y /= count;

    /* Pass 2: least-squares slope = sum(dx*dy) / sum(dx*dx) */
    num = den = 0;
    idx = s->history_idx;
    for (left = s->n_history; left > 0; left--) {
        int64_t dx, dy;

        dx = (int64_t) s->history_x[idx] - mean_x;
        dy = (int64_t) s->history_y[idx] - mean_y;

        num += dx*dy;
        den += dx*dx;

        idx++;
        while (idx >= HISTORY_MAX)
            idx -= HISTORY_MAX;
    }

    slope = (double) num / den;

    return (s->monotonic && slope < 0) ? 0 : slope;
}
/* Insert the measurement (x, y) into the history ring buffer.
 * If a sample with the same x already exists, its y is overwritten
 * in place; otherwise expired entries are dropped, the new sample is
 * appended, and the buffer is clamped to HISTORY_MAX entries by
 * discarding the oldest ones. */
static void add_to_history(pa_smoother *s, uint64_t x, uint64_t y) {
    unsigned pos, remaining;

    pa_assert(s);

    /* If we already hold a sample at this x, just replace its y */
    pos = s->history_idx;
    for (remaining = s->n_history; remaining > 0; remaining--) {
        if (s->history_x[pos] == x) {
            s->history_y[pos] = y;
            return;
        }

        REDUCE_INC(pos);
    }

    /* Expire entries that are too old */
    drop_old(s, x);

    /* Append at the logical end of the ring buffer */
    pos = s->history_idx + s->n_history;
    REDUCE(pos);

    s->history_x[pos] = x;
    s->history_y[pos] = y;

    s->n_history++;

    /* Clamp to capacity, discarding the oldest entries and keeping
     * history_idx inside the ring. */
    if (s->n_history > HISTORY_MAX) {
        s->history_idx += s->n_history - HISTORY_MAX;
        REDUCE(s->history_idx);
        s->n_history = HISTORY_MAX;
    }
}
/* Append the measurement (x, y) to the history ring buffer after
 * dropping expired entries; clamps the buffer to HISTORY_MAX entries
 * by advancing history_idx past the oldest ones. */
static void add_to_history(pa_smoother *s, pa_usec_t x, pa_usec_t y) {
    unsigned j;

    pa_assert(s);

    drop_old(s, x);

    /* Calculate position for new entry */
    j = s->history_idx + s->n_history;
    while (j >= HISTORY_MAX)
        j -= HISTORY_MAX;

    /* Fill in entry */
    s->history_x[j] = x;
    s->history_y[j] = y;

    /* Adjust counter */
    s->n_history ++;

    /* And make sure we don't store more entries than fit in.
     * '>' rather than '>=': when n_history == HISTORY_MAX the delta
     * below is 0 and the body would be a no-op anyway. */
    if (s->n_history > HISTORY_MAX) {
        s->history_idx += s->n_history - HISTORY_MAX;

        /* BUG FIX: reduce history_idx back into [0, HISTORY_MAX).
         * Previously it was left unreduced, so it could drift past the
         * array bound; avg_gradient() starts indexing at history_idx
         * without reducing it first, yielding out-of-bounds reads. */
        while (s->history_idx >= HISTORY_MAX)
            s->history_idx -= HISTORY_MAX;

        s->n_history = HISTORY_MAX;
    }
}