static inline struct mbuf *
priq_getq(struct priq_class *cl)
{
	IFCQ_LOCK_ASSERT_HELD(cl->cl_pif->pif_ifq);

#if CLASSQ_RIO
	if (q_is_rio(&cl->cl_q))
		return (rio_getq(cl->cl_rio, &cl->cl_q));
	else
#endif /* CLASSQ_RIO */
#if CLASSQ_RED
	if (q_is_red(&cl->cl_q))
		return (red_getq(cl->cl_red, &cl->cl_q));
	else
#endif /* CLASSQ_RED */
#if CLASSQ_BLUE
	if (q_is_blue(&cl->cl_q))
		return (blue_getq(cl->cl_blue, &cl->cl_q));
	else
#endif /* CLASSQ_BLUE */
	if (q_is_sfb(&cl->cl_q) && cl->cl_sfb != NULL)
		return (sfb_getq(cl->cl_sfb, &cl->cl_q));

	return (_getq(&cl->cl_q));
}
static struct mbuf *
priq_getq(struct priq_class *cl)
{
#ifdef ALTQ_RIO
	if (q_is_rio(cl->cl_q))
		/* ALTQ stores the RIO state in the class's cl_red pointer. */
		return rio_getq((rio_t *)cl->cl_red, cl->cl_q);
#endif
#ifdef ALTQ_RED
	if (q_is_red(cl->cl_q))
		return red_getq(cl->cl_red, cl->cl_q);
#endif
	return _getq(cl->cl_q);
}
static inline struct mbuf *
fairq_getq(struct fairq_class *cl, u_int64_t cur_time)
{
	fairq_bucket_t *b;
	struct mbuf *m;

	IFCQ_LOCK_ASSERT_HELD(cl->cl_fif->fif_ifq);

	b = fairq_selectq(cl, 0);
	if (b == NULL)
		m = NULL;
#if CLASSQ_RIO
	else if (cl->cl_qtype == Q_RIO)
		m = rio_getq(cl->cl_rio, &b->queue);
#endif /* CLASSQ_RIO */
#if CLASSQ_RED
	else if (cl->cl_qtype == Q_RED)
		m = red_getq(cl->cl_red, &b->queue);
#endif /* CLASSQ_RED */
#if CLASSQ_BLUE
	else if (cl->cl_qtype == Q_BLUE)
		m = blue_getq(cl->cl_blue, &b->queue);
#endif /* CLASSQ_BLUE */
	else if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
		m = sfb_getq(cl->cl_sfb, &b->queue);
	else
		m = _getq(&b->queue);

	/*
	 * Calculate the BW change
	 */
	if (m != NULL) {
		u_int64_t delta;

		/*
		 * Per-class bandwidth calculation
		 */
		delta = (cur_time - cl->cl_last_time);
		if (delta > machclk_freq * 8)
			delta = machclk_freq * 8;
		cl->cl_bw_delta += delta;
		cl->cl_bw_bytes += m->m_pkthdr.len;
		cl->cl_last_time = cur_time;
		if (cl->cl_bw_delta > machclk_freq) {
			cl->cl_bw_delta -= cl->cl_bw_delta >> 2;
			cl->cl_bw_bytes -= cl->cl_bw_bytes >> 2;
		}
	}

	return (m);
}
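/*
 * Illustration only, not part of the kernel sources above.  A minimal
 * user-space sketch of the accumulate-and-decay scheme that fairq_getq()
 * applies to its per-class bandwidth accounting.  The names bw_state,
 * bw_estimate(), and clk_freq are hypothetical, and the final conversion
 * of the two accumulators into bytes per second is an assumption about
 * how they are meant to be combined; the excerpt above only shows the
 * accumulators being maintained.
 */
#include <stdint.h>

struct bw_state {
	uint64_t delta;     /* accumulated clock ticks, decayed  */
	uint64_t bytes;     /* accumulated bytes, decayed        */
	uint64_t last_time; /* clock value at the last dequeue   */
};

uint64_t
bw_estimate(struct bw_state *st, uint64_t cur_time, uint64_t pkt_len,
    uint64_t clk_freq)
{
	uint64_t delta = cur_time - st->last_time;

	/* Clamp long idle gaps so one quiet period cannot swamp the history. */
	if (delta > clk_freq * 8)
		delta = clk_freq * 8;

	st->delta += delta;
	st->bytes += pkt_len;
	st->last_time = cur_time;

	/*
	 * Once more than one second's worth of ticks (clk_freq) has
	 * accumulated, keep only 3/4 of both accumulators.  This exponential
	 * decay makes the ratio track recent throughput rather than the
	 * lifetime average.
	 */
	if (st->delta > clk_freq) {
		st->delta -= st->delta >> 2;
		st->bytes -= st->bytes >> 2;
	}

	/* Bytes per second implied by the two accumulators. */
	return (st->delta != 0) ? (st->bytes * clk_freq / st->delta) : 0;
}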