// Setup cyclic background refresh: set delta q and segmentation map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate(cm, rc);
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Don't apply refresh on key frame or temporal enhancement layer frames.
  if (!apply_cyclic_refresh || (cm->frame_type == KEY_FRAME) ||
      (cpi->svc.temporal_layer_id > 0)) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      memset(cr->consec_zero_mv, 0,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->consec_zero_mv));
      cr->sb_index = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    vpx_clear_system_state();
    // Set rate threshold to some multiple (set to 2 for now) of the target
    // rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;

    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // vp9_choose_segmap_coding_method(), based on the coding cost of each
    // method. In error_resilient mode, the last_frame_seg_map is set to 0, so
    // if temporal coding is used, it is relative to 0 previous map.
    // seg->temporal_update = 0;

    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);

    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;

    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);

    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);

    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);

    // Reset if resolution change has occurred.
    if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);

    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}
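// Illustrative sketch (not part of libvpx): the fixed-point arithmetic behind
// the two thresholds set above, using plain scalars. The per-SB64 rate
// threshold is the target rate scaled by 256 (<< 8) and then shifted left by
// 2 (x4); the distortion threshold is quadratic in q, which stays below 457,
// so q * q fits in 32 bits before widening. Helper names are hypothetical;
// assumes <stdint.h> for int64_t.
static int64_t sketch_thresh_rate_sb(int sb64_target_rate) {
  // Target rate in 1/256 units, then x4 as the per-superblock rate threshold.
  return ((int64_t)sb64_target_rate << 8) << 2;
}

static int64_t sketch_thresh_dist_sb(double q) {
  // Quadratic-in-q distortion threshold, x4 scale factor.
  return ((int64_t)(q * q)) << 2;
}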
void vp9_setup_in_frame_q_adj(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  struct segmentation *const seg = &cm->seg;

  // Make SURE use of floating point in this function is safe.
  vp9_clear_system_state();

  if (cm->frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame ||
      (cpi->refresh_golden_frame && !cpi->rc.is_src_frame_alt_ref)) {
    int segment;
    const int aq_strength = get_aq_c_strength(cm->base_qindex, cm->bit_depth);

    // Clear down the segment map.
    memset(cpi->segmentation_map, DEFAULT_AQ2_SEG, cm->mi_rows * cm->mi_cols);
    vp9_clearall_segfeatures(seg);

    // Segmentation only makes sense if the target bits per SB is above a
    // threshold. Below this the overheads will usually outweigh any benefit.
    if (cpi->rc.sb64_target_rate < 256) {
      vp9_disable_segmentation(seg);
      return;
    }

    vp9_enable_segmentation(seg);

    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Default segment "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, DEFAULT_AQ2_SEG, SEG_LVL_ALT_Q);

    // Use some of the segments for in-frame Q adjustment.
    for (segment = 0; segment < AQ_C_SEGMENTS; ++segment) {
      int qindex_delta;

      if (segment == DEFAULT_AQ2_SEG) continue;

      qindex_delta = vp9_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, cm->base_qindex,
          aq_c_q_adj_factor[aq_strength][segment], cm->bit_depth);

      // For AQ complexity mode, we don't allow Q0 in a segment if the base
      // Q is not 0. Q0 (lossless) implies 4x4 only and in AQ mode 2 a segment
      // Q delta is sometimes applied without going back around the rd loop.
      // This could lead to an illegal combination of partition size and q.
      if ((cm->base_qindex != 0) && ((cm->base_qindex + qindex_delta) == 0)) {
        qindex_delta = -cm->base_qindex + 1;
      }
      if ((cm->base_qindex + qindex_delta) > 0) {
        vp9_enable_segfeature(seg, segment, SEG_LVL_ALT_Q);
        vp9_set_segdata(seg, segment, SEG_LVL_ALT_Q, qindex_delta);
      }
    }
  }
}
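// Illustrative sketch (not part of libvpx): the lossless-segment guard used
// in the loop above, isolated with plain ints. If the base Q is nonzero but
// the per-segment delta would drive the segment to exactly Q0 (lossless,
// which forces 4x4 transforms), the delta is pulled back so the segment Q
// stays at least 1. Helper name is hypothetical.
static int sketch_clamp_segment_qdelta(int base_qindex, int qindex_delta) {
  if (base_qindex != 0 && base_qindex + qindex_delta == 0)
    qindex_delta = -base_qindex + 1;  // Keep segment Q at 1, not 0.
  return qindex_delta;
}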
// void separate_arf_mbs_byzz
static void separate_arf_mbs(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  int mb_col, mb_row, offset, i;
  int mi_row, mi_col;
  int ncnt[4] = { 0 };
  int n_frames = cpi->mbgraph_n_frames;

  int *arf_not_zz;

  CHECK_MEM_ERROR(cm, arf_not_zz,
                  vpx_calloc(cm->mb_rows * cm->mb_cols * sizeof(*arf_not_zz),
                             1));

  // We are not interested in results beyond the alt ref itself.
  if (n_frames > cpi->rc.frames_till_gf_update_due)
    n_frames = cpi->rc.frames_till_gf_update_due;

  // defer cost to reference frames
  for (i = n_frames - 1; i >= 0; i--) {
    MBGRAPH_FRAME_STATS *frame_stats = &cpi->mbgraph_stats[i];

    for (offset = 0, mb_row = 0; mb_row < cm->mb_rows;
         offset += cm->mb_cols, mb_row++) {
      for (mb_col = 0; mb_col < cm->mb_cols; mb_col++) {
        MBGRAPH_MB_STATS *mb_stats = &frame_stats->mb_stats[offset + mb_col];

        int altref_err = mb_stats->ref[ALTREF_FRAME].err;
        int intra_err = mb_stats->ref[INTRA_FRAME].err;
        int golden_err = mb_stats->ref[GOLDEN_FRAME].err;

        // Test for altref vs intra and gf and that its mv was 0,0.
        if (altref_err > 1000 || altref_err > intra_err ||
            altref_err > golden_err) {
          arf_not_zz[offset + mb_col]++;
        }
      }
    }
  }

  // arf_not_zz is indexed by MB, but this loop is indexed by MI to avoid out
  // of bound access in segmentation_map
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++) {
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      // If any of the blocks in the sequence failed then the MB
      // goes in segment 0
      if (arf_not_zz[mi_row / 2 * cm->mb_cols + mi_col / 2]) {
        ncnt[0]++;
        cpi->segmentation_map[mi_row * cm->mi_cols + mi_col] = 0;
      } else {
        cpi->segmentation_map[mi_row * cm->mi_cols + mi_col] = 1;
        ncnt[1]++;
      }
    }
  }

  // Only bother with segmentation if over 10% of the MBs in static segment
  // if ( ncnt[1] && (ncnt[0] / ncnt[1] < 10) )
  if (1) {
    // Note % of blocks that are marked as static
    if (cm->MBs)
      cpi->static_mb_pct = (ncnt[1] * 100) / (cm->mi_rows * cm->mi_cols);

    // This error case should not be reachable as this function should
    // never be called with the common data structure uninitialized.
    else
      cpi->static_mb_pct = 0;

    vp9_enable_segmentation(&cm->seg);
  } else {
    cpi->static_mb_pct = 0;
    vp9_disable_segmentation(&cm->seg);
  }

  // Free locally allocated storage
  vpx_free(arf_not_zz);
}
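// Illustrative sketch (not part of libvpx): the MB-to-MI index mapping used
// in the second loop above. arf_not_zz is indexed on the 16x16 macroblock
// grid, while the segmentation map is indexed on the 8x8 mode-info grid, so
// each MB entry covers a 2x2 group of MI entries (hence the /2 on mi_row and
// mi_col). Helper name is hypothetical.
static int sketch_mb_index_for_mi(int mi_row, int mi_col, int mb_cols) {
  // Two MI units per macroblock in each dimension.
  return (mi_row / 2) * mb_cols + (mi_col / 2);
}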
// Setup cyclic background refresh: set delta q and segmentation map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  unsigned char *const seg_map = cpi->segmentation_map;
  const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate(cm, rc);
  // Don't apply refresh on key frame or enhancement layer frames.
  if (!apply_cyclic_refresh ||
      (cm->frame_type == KEY_FRAME) ||
      (cpi->svc.temporal_layer_id > 0)) {
    // Set segmentation map to 0 and disable.
    vpx_memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME)
      cr->sb_index = 0;
    return;
  } else {
    int qindex_delta = 0;
    int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
    int xmis, ymis, x, y, qindex2;

    // Rate target ratio to set q delta.
    const float rate_ratio_qdelta = 2.0;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex);
    vp9_clear_system_state();
    // Some of these parameters may be set via codec-control function later.
    cr->max_sbs_perframe = 10;
    cr->max_qdelta_perc = 50;
    cr->min_block_size = BLOCK_8X8;
    cr->time_for_refresh = 1;
    // Set rate threshold to some fraction of target (and scaled by 256).
    cr->thresh_rate_sb = (rc->sb64_target_rate * 256) >> 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    cr->thresh_dist_sb = 8 * (int)(q * q);
    if (cpi->sf.use_nonrd_pick_mode) {
      // May want to be more conservative with thresholds in non-rd mode for
      // now, as rate/distortion are derived from a model based on the
      // prediction residual.
      cr->thresh_rate_sb = (rc->sb64_target_rate * 256) >> 3;
      cr->thresh_dist_sb = 4 * (int)(q * q);
    }

    cr->num_seg_blocks = 0;
    // Set up segmentation.
    // Clear down the segment map.
    vpx_memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // vp9_choose_segmap_coding_method(), based on the coding cost of each
    // method. In error_resilient mode, the last_frame_seg_map is set to 0, so
    // if temporal coding is used, it is relative to 0 previous map.
    // seg->temporal_update = 0;

    // Segment 0 "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, 0, SEG_LVL_ALT_Q);
    // Use segment 1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);

    // Set the q delta for segment 1.
    qindex_delta = vp9_compute_qdelta_by_rate(rc, cm->frame_type,
                                              cm->base_qindex,
                                              rate_ratio_qdelta);
    // TODO(marpan): Incorporate the actual-vs-target rate over/undershoot from
    // previous encoded frame.
    if (-qindex_delta > cr->max_qdelta_perc * cm->base_qindex / 100)
      qindex_delta = -cr->max_qdelta_perc * cm->base_qindex / 100;

    // Compute rd-mult for segment 1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);

    vp9_set_segdata(seg, 1, SEG_LVL_ALT_Q, qindex_delta);

    sb_cols = (cm->mi_cols + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
    sb_rows = (cm->mi_rows + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
    sbs_in_frame = sb_cols * sb_rows;
    // Number of target superblocks to get the q delta (segment 1).
    block_count = cr->max_sbs_perframe * sbs_in_frame / 100;
    // Set the segmentation map: cycle through the superblocks, starting at
    // cr->sb_index, and stopping when either block_count blocks have been
    // found to be refreshed, or we have passed through the whole frame.
    assert(cr->sb_index < sbs_in_frame);
    i = cr->sb_index;
    do {
      int sum_map = 0;
      // Get the mi_row/mi_col corresponding to superblock index i.
      int sb_row_index = (i / sb_cols);
      int sb_col_index = i - sb_row_index * sb_cols;
      int mi_row = sb_row_index * MI_BLOCK_SIZE;
      int mi_col = sb_col_index * MI_BLOCK_SIZE;
      assert(mi_row >= 0 && mi_row < cm->mi_rows);
      assert(mi_col >= 0 && mi_col < cm->mi_cols);
      bl_index = mi_row * cm->mi_cols + mi_col;
      // Loop through all 8x8 blocks in superblock and update map.
      xmis = MIN(cm->mi_cols - mi_col,
                 num_8x8_blocks_wide_lookup[BLOCK_64X64]);
      ymis = MIN(cm->mi_rows - mi_row,
                 num_8x8_blocks_high_lookup[BLOCK_64X64]);
      for (y = 0; y < ymis; y++) {
        for (x = 0; x < xmis; x++) {
          const int bl_index2 = bl_index + y * cm->mi_cols + x;
          // If the block is a candidate for clean up, mark it for possible
          // boost/refresh (segment 1). The segment id may get reset to 0
          // later if the block gets coded as anything other than ZEROMV.
          if (cr->map[bl_index2] == 0) {
            seg_map[bl_index2] = 1;
            sum_map++;
          } else if (cr->map[bl_index2] < 0) {
            cr->map[bl_index2]++;
          }
        }
      }
      // Enforce constant segment over superblock.
      // If segment is partial over superblock, reset to either all 1 or 0.
      if (sum_map > 0 && sum_map < xmis * ymis) {
        const int new_value = (sum_map >= xmis * ymis / 2);
        for (y = 0; y < ymis; y++)
          for (x = 0; x < xmis; x++)
            seg_map[bl_index + y * cm->mi_cols + x] = new_value;
      }
      i++;
      if (i == sbs_in_frame) {
        i = 0;
      }
      if (sum_map >= xmis * ymis / 2)
        block_count--;
    } while (block_count && i != cr->sb_index);
    cr->sb_index = i;
  }
}
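// Illustrative sketch (not part of libvpx): the wrap-around scan pattern used
// above, reduced to plain ints. Starting from the superblock index saved on
// the previous frame, superblocks are visited in raster order, wrapping at
// the end of the frame, until the refresh budget is spent or a full pass has
// been made; the final index is stored so the next frame resumes where this
// one stopped. Names are hypothetical, and the per-superblock refresh
// decision (the map check in the real code) is elided.
static int sketch_cyclic_scan(int start_index, int sbs_in_frame,
                              int refresh_budget) {
  int i = start_index;
  do {
    // Decide whether superblock i is refreshed; here every visited block is
    // charged against the budget, whereas the real code only charges blocks
    // whose refresh map marks at least half of the superblock.
    refresh_budget--;
    i++;
    if (i == sbs_in_frame) i = 0;  // Wrap to the top of the frame.
  } while (refresh_budget > 0 && i != start_index);
  return i;  // Resume point for the next frame's scan.
}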