// Set up cyclic background refresh for the current frame: choose the
// delta-Q values and write the segmentation map and features.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate(cm, rc);
  // First frame of the clip: reset the running low-content average.
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Don't apply refresh on key frame or temporal enhancement layer frames.
  if (!apply_cyclic_refresh || (cm->frame_type == KEY_FRAME) ||
      (cpi->svc.temporal_layer_id > 0)) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME) {
      // A key frame refreshes everything, so reset the per-block refresh
      // state: last coded Q back to MAXQ, zero-motion counters to 0, and
      // the superblock refresh scan position to the start.
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      memset(cr->consec_zero_mv, 0,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->consec_zero_mv));
      cr->sb_index = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    vpx_clear_system_state();
    // Set rate threshold to some multiple (set to 2 for now) of the target
    // rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;
    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;
    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // vp9_choose_segmap_coding_method(), based on the coding cost of each
    // method. For error_resilient mode on the last_frame_seg_map is set to
    // 0, so if temporal coding is used, it is relative to 0 previous map.
    // seg->temporal_update = 0;
    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);
    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;
    // Compute rd-mult for segment BOOST1, using the clamped effective Q.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);
    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);
    // Reset if resolution change has occurred.
    if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);
    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}
// Hand the most recently decoded displayable frame to the caller via *sd,
// along with its timestamps. Returns 0 on success, -1 when there is no new
// frame to show.
int vp8dx_get_raw_frame(VP8D_COMP *pbi, YV12_BUFFER_CONFIG *sd,
                        int64_t *time_stamp, int64_t *time_end_stamp,
                        vp8_ppflags_t *flags) {
  int ret = -1;

  // Nothing to hand out: either the frame was already consumed, or the
  // decoder produced a non-displayable frame.
  if (pbi->ready_for_new_data == 1 || pbi->common.show_frame == 0) return ret;

  pbi->ready_for_new_data = 1;
  *time_stamp = pbi->last_time_stamp;
  *time_end_stamp = 0;

#if CONFIG_POSTPROC
  ret = vp8_post_proc_frame(&pbi->common, sd, flags);
#else
  (void)flags;

  if (pbi->common.frame_to_show == NULL) {
    ret = -1;
  } else {
    // Shallow-copy the buffer descriptor, then patch the display
    // dimensions onto it.
    *sd = *pbi->common.frame_to_show;
    sd->y_width = pbi->common.Width;
    sd->y_height = pbi->common.Height;
    sd->uv_height = pbi->common.Height / 2;
    ret = 0;
  }
#endif /*!CONFIG_POSTPROC*/
  vpx_clear_system_state();
  return ret;
}
// Configure segmentation and per-segment Q deltas for complexity-based AQ.
// Only frames that reset the AQ state (intra-only, error-resilient, ARF
// refresh, and non-overlay golden refresh frames) rebuild the setup; all
// other frames leave the existing segmentation untouched.
void vp10_setup_in_frame_q_adj(VP10_COMP *cpi) {
  VP10_COMMON *const cm = &cpi->common;
  struct segmentation *const seg = &cm->seg;
  int seg_id;
  int aq_strength;

  // Make SURE use of floating point in this function is safe.
  vpx_clear_system_state();

  // Guard clause: bail out unless this frame refreshes the AQ state.
  if (!frame_is_intra_only(cm) && !cm->error_resilient_mode &&
      !cpi->refresh_alt_ref_frame &&
      !(cpi->refresh_golden_frame && !cpi->rc.is_src_frame_alt_ref)) {
    return;
  }

  aq_strength = get_aq_c_strength(cm->base_qindex, cm->bit_depth);

  // Reset the whole map to the default segment and drop all features.
  memset(cpi->segmentation_map, DEFAULT_AQ2_SEG, cm->mi_rows * cm->mi_cols);
  vp10_clearall_segfeatures(seg);

  // Segmentation only makes sense if the target bits per SB is above a
  // threshold. Below this the overheads will usually outweigh any benefit.
  if (cpi->rc.sb64_target_rate < 256) {
    vp10_disable_segmentation(seg);
    return;
  }

  vp10_enable_segmentation(seg);

  // Select delta coding method.
  seg->abs_delta = SEGMENT_DELTADATA;

  // The default segment carries no ALT_Q feature, so it uses the baseline Q.
  vp10_disable_segfeature(seg, DEFAULT_AQ2_SEG, SEG_LVL_ALT_Q);

  // Give each remaining segment its own in-frame Q adjustment.
  for (seg_id = 0; seg_id < AQ_C_SEGMENTS; ++seg_id) {
    int qdelta;
    if (seg_id == DEFAULT_AQ2_SEG) continue;
    qdelta = vp10_compute_qdelta_by_rate(
        &cpi->rc, cm->frame_type, cm->base_qindex,
        aq_c_q_adj_factor[aq_strength][seg_id], cm->bit_depth);
    // For AQ complexity mode, we dont allow Q0 in a segment if the base
    // Q is not 0. Q0 (lossless) implies 4x4 only and in AQ mode 2 a segment
    // Q delta is sometimes applied without going back around the rd loop.
    // This could lead to an illegal combination of partition size and q.
    if ((cm->base_qindex != 0) && ((cm->base_qindex + qdelta) == 0)) {
      qdelta = 1 - cm->base_qindex;
    }
    if ((cm->base_qindex + qdelta) > 0) {
      vp10_enable_segfeature(seg, seg_id, SEG_LVL_ALT_Q);
      vp10_set_segdata(seg, seg_id, SEG_LVL_ALT_Q, qdelta);
    }
  }
}
// Select a segment for the current block. // The choice of segment for a block depends on the ratio of the projected // bits for the block vs a target average and its spatial complexity. void vp10_caq_select_segment(VP10_COMP *cpi, MACROBLOCK *mb, BLOCK_SIZE bs, int mi_row, int mi_col, int projected_rate) { VP10_COMMON *const cm = &cpi->common; const int mi_offset = mi_row * cm->mi_cols + mi_col; const int bw = num_8x8_blocks_wide_lookup[BLOCK_64X64]; const int bh = num_8x8_blocks_high_lookup[BLOCK_64X64]; const int xmis = VPXMIN(cm->mi_cols - mi_col, num_8x8_blocks_wide_lookup[bs]); const int ymis = VPXMIN(cm->mi_rows - mi_row, num_8x8_blocks_high_lookup[bs]); int x, y; int i; unsigned char segment; if (0) { segment = DEFAULT_AQ2_SEG; } else { // Rate depends on fraction of a SB64 in frame (xmis * ymis / bw * bh). // It is converted to bits * 256 units. const int target_rate = (cpi->rc.sb64_target_rate * xmis * ymis * 256) / (bw * bh); double logvar; double low_var_thresh; const int aq_strength = get_aq_c_strength(cm->base_qindex, cm->bit_depth); vpx_clear_system_state(); low_var_thresh = (cpi->oxcf.pass == 2) ? VPXMAX(cpi->twopass.mb_av_energy, MIN_DEFAULT_LV_THRESH) : DEFAULT_LV_THRESH; vp10_setup_src_planes(mb, cpi->Source, mi_row, mi_col); logvar = vp10_log_block_var(cpi, mb, bs); segment = AQ_C_SEGMENTS - 1; // Just in case no break out below. for (i = 0; i < AQ_C_SEGMENTS; ++i) { // Test rate against a threshold value and variance against a threshold. // Increasing segment number (higher variance and complexity) = higher Q. if ((projected_rate < target_rate * aq_c_transitions[aq_strength][i]) && (logvar < (low_var_thresh + aq_c_var_thresholds[aq_strength][i]))) { segment = i; break; } } } // Fill in the entires in the segment map corresponding to this SB64. for (y = 0; y < ymis; y++) { for (x = 0; x < xmis; x++) { cpi->segmentation_map[mi_offset + y * cm->mi_cols + x] = segment; } } }
// This function returns the blockiness for the entire frame, currently by
// looking at all borders in steps of 4.
// img1/img2: the two frames to compare, each with its own row pitch;
// width/height give the frame dimensions in pixels.
// Returns the accumulated border blockiness normalized by the number of
// 4x4 blocks, or 0 for frames smaller than one 4x4 block.
// Fixes: (1) the normalization `blockiness /= width * height / 16` divided
// by zero (in floating point, yielding NaN) when width * height < 16 —
// now guarded; (2) the loop-body checks `i < height` and `j < width` were
// always true inside the loops and have been removed.
double vp10_get_blockiness(const unsigned char *img1, int img1_pitch,
                           const unsigned char *img2, int img2_pitch,
                           int width, int height) {
  double blockiness = 0;
  const int num_blocks = width * height / 16;  // integer count of 4x4 blocks
  int i, j;
  vpx_clear_system_state();
  // Degenerate frame: no interior 4x4 borders exist and the divisor below
  // would be zero, so report zero blockiness.
  if (num_blocks <= 0) return 0;
  for (i = 0; i < height;
       i += 4, img1 += img1_pitch * 4, img2 += img2_pitch * 4) {
    for (j = 0; j < width; j += 4) {
      // Skip the frame's own top and left edges; only interior block
      // borders contribute.
      if (i > 0 && j > 0) {
        blockiness +=
            blockiness_vertical(img1 + j, img1_pitch, img2 + j, img2_pitch, 4);
        blockiness += blockiness_horizontal(img1 + j, img1_pitch, img2 + j,
                                            img2_pitch, 4);
      }
    }
  }
  blockiness /= num_blocks;
  return blockiness;
}
/* Create the decoder instance(s) for the given frame-buffer set and config.
 * Instance 0 is the single-thread decoder; when multithreading is compiled
 * in, worker threads are created on top of it. Returns VPX_CODEC_OK on
 * success, VPX_CODEC_ERROR on failure. */
int vp8_create_decoder_instances(struct frame_buffers *fb, VP8D_CONFIG *oxcf) {
  /* decoder instance for single thread mode */
  fb->pbi[0] = create_decompressor(oxcf);
  if (!fb->pbi[0]) return VPX_CODEC_ERROR;
#if CONFIG_MULTITHREAD
  /* Recovery point: any longjmp raised while error.setjmp is armed below
   * (i.e. during thread creation) lands here; tear down all instances and
   * report the error. */
  if (setjmp(fb->pbi[0]->common.error.jmp)) {
    vp8_remove_decoder_instances(fb);
    /* Clear dangling pointers left by the teardown. */
    memset(fb->pbi, 0, sizeof(fb->pbi));
    vpx_clear_system_state();
    return VPX_CODEC_ERROR;
  }
  fb->pbi[0]->common.error.setjmp = 1;
  fb->pbi[0]->max_threads = oxcf->max_threads;
  vp8_decoder_create_threads(fb->pbi[0]);
  /* Disarm the recovery point once thread creation has succeeded. */
  fb->pbi[0]->common.error.setjmp = 0;
#endif
  return VPX_CODEC_OK;
}
// Accumulate per-macroblock motion statistics over the lookahead queue
// (against the golden reference) and then run separate_arf_mbs() on them.
void vp9_update_mbgraph_stats(VP9_COMP *cpi) {
  VP9_COMMON *const cm = &cpi->common;
  int i, n_frames = vp9_lookahead_depth(cpi->lookahead);
  YV12_BUFFER_CONFIG *golden_ref = get_ref_frame_buffer(cpi, GOLDEN_FRAME);
  assert(golden_ref != NULL);
  // we need to look ahead beyond where the ARF transitions into
  // being a GF - so exit if we don't look ahead beyond that
  if (n_frames <= cpi->rc.frames_till_gf_update_due) return;

  // Cap the number of processed frames at the stats array capacity.
  if (n_frames > MAX_LAG_BUFFERS) n_frames = MAX_LAG_BUFFERS;

  cpi->mbgraph_n_frames = n_frames;
  // First pass: zero all per-frame MB stats before accumulating into them.
  for (i = 0; i < n_frames; i++) {
    MBGRAPH_FRAME_STATS *frame_stats = &cpi->mbgraph_stats[i];
    memset(frame_stats->mb_stats, 0,
           cm->mb_rows * cm->mb_cols * sizeof(*cpi->mbgraph_stats[i].mb_stats));
  }

  // do motion search to find contribution of each reference to data
  // later on in this GF group
  // FIXME really, the GF/last MC search should be done forward, and
  // the ARF MC search backwards, to get optimal results for MV caching
  for (i = 0; i < n_frames; i++) {
    MBGRAPH_FRAME_STATS *frame_stats = &cpi->mbgraph_stats[i];
    struct lookahead_entry *q_cur = vp9_lookahead_peek(cpi->lookahead, i);
    assert(q_cur != NULL);
    update_mbgraph_frame_stats(cpi, frame_stats, &q_cur->img, golden_ref,
                               cpi->Source);
  }

  vpx_clear_system_state();

  separate_arf_mbs(cpi);
}
// Configure per-segment Q deltas for variance-based adaptive quantization.
// Only runs on frames that refresh segmentation state: key frames, ARF
// refreshes, and non-overlay golden-frame refreshes.
// Improvement: the `rate_ratio[i] == 1.0` skip is now checked BEFORE
// computing the qindex delta — previously vp9_compute_qdelta_by_rate() (a
// pure rate-table computation) was evaluated and then discarded for those
// segments. Observable behavior is unchanged.
void vp9_vaq_frame_setup(VP9_COMP *cpi) {
  VP9_COMMON *cm = &cpi->common;
  struct segmentation *seg = &cm->seg;
  int i;

  if (cm->frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame ||
      (cpi->refresh_golden_frame && !cpi->rc.is_src_frame_alt_ref)) {
    vp9_enable_segmentation(seg);
    vp9_clearall_segfeatures(seg);

    // Deltas are coded relative to the base Q.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Make sure floating-point use below is safe.
    vpx_clear_system_state();

    for (i = 0; i < MAX_SEGMENTS; ++i) {
      int qindex_delta;

      // A unity rate ratio means this segment uses the base Q unchanged:
      // no need to compute a delta or enable SEG_LVL_ALT_Q for it.
      if (rate_ratio[i] == 1.0) {
        continue;
      }

      qindex_delta =
          vp9_compute_qdelta_by_rate(&cpi->rc, cm->frame_type, cm->base_qindex,
                                     rate_ratio[i], cm->bit_depth);

      // We don't allow qindex 0 in a segment if the base value is not 0.
      // Q index 0 (lossless) implies 4x4 encoding only and in AQ mode a
      // segment Q delta is sometimes applied without going back around the
      // rd loop. This could lead to an illegal combination of partition
      // size and q.
      if ((cm->base_qindex != 0) && ((cm->base_qindex + qindex_delta) == 0)) {
        qindex_delta = -cm->base_qindex + 1;
      }

      vp9_set_segdata(seg, i, SEG_LVL_ALT_Q, qindex_delta);
      vp9_enable_segfeature(seg, i, SEG_LVL_ALT_Q);
    }
  }
}
// Assemble the complete frame bitstream into `dest`: uncompressed header,
// compressed header (with a back-patched 16-bit size field), then tile
// data. The total byte count is returned through *size.
void vp9_pack_bitstream(VP9_COMP *cpi, uint8_t *dest, size_t *size) {
  uint8_t *pos = dest;
  struct vpx_write_bit_buffer wb = {dest, 0};
  struct vpx_write_bit_buffer saved_wb;
  size_t uncompressed_hdr_size;
  size_t compressed_hdr_size;

  write_uncompressed_header(cpi, &wb);

  // Remember where the compressed-header size slot lives so it can be
  // back-patched once that size is known, then reserve 16 bits for it.
  saved_wb = wb;
  vpx_wb_write_literal(&wb, 0, 16);  // placeholder: size not yet known

  uncompressed_hdr_size = vpx_wb_bytes_written(&wb);
  pos += uncompressed_hdr_size;

  vpx_clear_system_state();

  compressed_hdr_size = write_compressed_header(cpi, pos);
  pos += compressed_hdr_size;

  // TODO(jbb): Figure out what to do if first_part_size > 16 bits.
  vpx_wb_write_literal(&saved_wb, (int)compressed_hdr_size, 16);

  pos += encode_tiles(cpi, pos);

  *size = pos - dest;
}
// Entry point for one compressed frame payload. The visible portion:
// flags the last reference as corrupted for empty (missing-frame) input,
// acquires a free frame buffer, sets up frame-parallel bookkeeping, and
// installs the longjmp error handler that releases held buffers on decode
// failure. NOTE(review): the remainder of this function lies outside this
// excerpt.
int vp10_receive_compressed_data(VP10Decoder *pbi, size_t size,
                                 const uint8_t **psource) {
  // Locals are volatile-qualified so their values stay reliable across the
  // setjmp/longjmp below.
  VP10_COMMON *volatile const cm = &pbi->common;
  BufferPool *volatile const pool = cm->buffer_pool;
  RefCntBuffer *volatile const frame_bufs = cm->buffer_pool->frame_bufs;
  const uint8_t *source = *psource;
  int retcode = 0;

  cm->error.error_code = VPX_CODEC_OK;

  if (size == 0) {
    // This is used to signal that we are missing frames.
    // We do not know if the missing frame(s) was supposed to update
    // any of the reference buffers, but we act conservative and
    // mark only the last buffer as corrupted.
    //
    // TODO(jkoleszar): Error concealment is undefined and non-normative
    // at this point, but if it becomes so, [0] may not always be the correct
    // thing to do here.
    if (cm->frame_refs[0].idx > 0) {
      assert(cm->frame_refs[0].buf != NULL);
      cm->frame_refs[0].buf->corrupted = 1;
    }
  }

  pbi->ready_for_new_data = 0;

  // Check if the previous frame was a frame without any references to it.
  // Release frame buffer if not decoding in frame parallel mode.
  if (!pbi->frame_parallel_decode && cm->new_fb_idx >= 0 &&
      frame_bufs[cm->new_fb_idx].ref_count == 0)
    pool->release_fb_cb(pool->cb_priv,
                        &frame_bufs[cm->new_fb_idx].raw_frame_buffer);
  // Find a free frame buffer. Return error if can not find any.
  cm->new_fb_idx = get_free_fb(cm);
  if (cm->new_fb_idx == INVALID_IDX) return VPX_CODEC_MEM_ERROR;

  // Assign a MV array to the frame buffer.
  cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];

  pbi->hold_ref_buf = 0;
  if (pbi->frame_parallel_decode) {
    VPxWorker *const worker = pbi->frame_worker_owner;
    vp10_frameworker_lock_stats(worker);
    frame_bufs[cm->new_fb_idx].frame_worker_owner = worker;
    // Reset decoding progress.
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    vp10_frameworker_unlock_stats(worker);
  } else {
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
  }

  // Error handler: entered via longjmp if decoding fails after this point.
  if (setjmp(cm->error.jmp)) {
    const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
    int i;

    cm->error.setjmp = 0;
    pbi->ready_for_new_data = 1;

    // Synchronize all threads immediately as a subsequent decode call may
    // cause a resize invalidating some allocations.
    winterface->sync(&pbi->lf_worker);
    for (i = 0; i < pbi->num_tile_workers; ++i) {
      winterface->sync(&pbi->tile_workers[i]);
    }

    lock_buffer_pool(pool);
    // Release all the reference buffers if worker thread is holding them.
    if (pbi->hold_ref_buf == 1) {
      int ref_index = 0, mask;
      for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
        const int old_idx = cm->ref_frame_map[ref_index];
        // Current thread releases the holding of reference frame.
        decrease_ref_count(old_idx, frame_bufs, pool);

        // Release the reference frame in reference map.
        if ((mask & 1) && old_idx >= 0) {
          decrease_ref_count(old_idx, frame_bufs, pool);
        }
        ++ref_index;
      }

      // Current thread releases the holding of reference frame.
      for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
        const int old_idx = cm->ref_frame_map[ref_index];
        decrease_ref_count(old_idx, frame_bufs, pool);
      }
      pbi->hold_ref_buf = 0;
    }
    // Release current frame.
    decrease_ref_count(cm->new_fb_idx, frame_bufs, pool);
    unlock_buffer_pool(pool);

    vpx_clear_system_state();
    return -1;
  }
/* Decode one compressed frame: validates the fragment buffers, claims a
 * free frame buffer, runs vp8_decode_frame() under a longjmp error handler,
 * and swaps/publishes the result. Returns <= 0 from the fragment check,
 * the (negative) decode error, or 0/positive retcode on success. */
int vp8dx_receive_compressed_data(VP8D_COMP *pbi, size_t size,
                                  const uint8_t *source, int64_t time_stamp) {
  VP8_COMMON *cm = &pbi->common;
  int retcode = -1;
  /* Frame data arrives via the fragment mechanism, not these arguments. */
  (void)size;
  (void)source;

  pbi->common.error.error_code = VPX_CODEC_OK;

  retcode = check_fragments_for_errors(pbi);
  if (retcode <= 0) return retcode;

  cm->new_fb_idx = get_free_fb(cm);

  /* setup reference frames for vp8_decode_frame */
  pbi->dec_fb_ref[INTRA_FRAME] = &cm->yv12_fb[cm->new_fb_idx];
  pbi->dec_fb_ref[LAST_FRAME] = &cm->yv12_fb[cm->lst_fb_idx];
  pbi->dec_fb_ref[GOLDEN_FRAME] = &cm->yv12_fb[cm->gld_fb_idx];
  pbi->dec_fb_ref[ALTREF_FRAME] = &cm->yv12_fb[cm->alt_fb_idx];

  /* Error handler: entered via longjmp if decoding fails below. */
  if (setjmp(pbi->common.error.jmp)) {
    /* We do not know if the missing frame(s) was supposed to update
     * any of the reference buffers, but we act conservative and
     * mark only the last buffer as corrupted.
     */
    cm->yv12_fb[cm->lst_fb_idx].corrupted = 1;

    /* Drop the reference taken on the newly claimed frame buffer. */
    if (cm->fb_idx_ref_cnt[cm->new_fb_idx] > 0) {
      cm->fb_idx_ref_cnt[cm->new_fb_idx]--;
    }
    goto decode_exit;
  }

  pbi->common.error.setjmp = 1;

  retcode = vp8_decode_frame(pbi);

  if (retcode < 0) {
    /* Decode failed: release the claimed buffer and report the error. */
    if (cm->fb_idx_ref_cnt[cm->new_fb_idx] > 0) {
      cm->fb_idx_ref_cnt[cm->new_fb_idx]--;
    }

    pbi->common.error.error_code = VPX_CODEC_ERROR;
    goto decode_exit;
  }

  if (swap_frame_buffers(cm)) {
    pbi->common.error.error_code = VPX_CODEC_ERROR;
    goto decode_exit;
  }

  vpx_clear_system_state();

  if (cm->show_frame) {
    cm->current_video_frame++;
    cm->show_frame_mi = cm->mi;
  }

#if CONFIG_ERROR_CONCEALMENT
  /* swap the mode infos to storage for future error concealment */
  if (pbi->ec_enabled && pbi->common.prev_mi) {
    MODE_INFO *tmp = pbi->common.prev_mi;
    int row, col;
    pbi->common.prev_mi = pbi->common.mi;
    pbi->common.mi = tmp;

    /* Propagate the segment_ids to the next frame */
    for (row = 0; row < pbi->common.mb_rows; ++row) {
      for (col = 0; col < pbi->common.mb_cols; ++col) {
        const int i = row * pbi->common.mode_info_stride + col;
        pbi->common.mi[i].mbmi.segment_id =
            pbi->common.prev_mi[i].mbmi.segment_id;
      }
    }
  }
#endif

  pbi->ready_for_new_data = 0;
  pbi->last_time_stamp = time_stamp;

decode_exit:
  /* Disarm the error handler on every exit path. */
  pbi->common.error.setjmp = 0;
  vpx_clear_system_state();
  return retcode;
}