Example #1
static int vdpau_h264_start_frame(AVCodecContext *avctx,
                                  const uint8_t *buffer, uint32_t size)
{
    H264Context * const h = avctx->priv_data;
    const PPS *pps = h->ps.pps;
    const SPS *sps = h->ps.sps;
    H264Picture *pic = h->cur_pic_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    VdpPictureInfoH264 *info = &pic_ctx->info.h264;
#ifdef VDP_DECODER_PROFILE_H264_HIGH_444_PREDICTIVE
    VdpPictureInfoH264Predictive *info2 = &pic_ctx->info.h264_predictive;
#endif

    /* init VdpPictureInfoH264 */
    info->slice_count                            = 0;
    info->field_order_cnt[0]                     = h264_foc(pic->field_poc[0]);
    info->field_order_cnt[1]                     = h264_foc(pic->field_poc[1]);
    info->is_reference                           = h->nal_ref_idc != 0;
    info->frame_num                              = h->poc.frame_num;
    info->field_pic_flag                         = h->picture_structure != PICT_FRAME;
    info->bottom_field_flag                      = h->picture_structure == PICT_BOTTOM_FIELD;
    info->num_ref_frames                         = sps->ref_frame_count;
    info->mb_adaptive_frame_field_flag           = sps->mb_aff && !info->field_pic_flag;
    info->constrained_intra_pred_flag            = pps->constrained_intra_pred;
    info->weighted_pred_flag                     = pps->weighted_pred;
    info->weighted_bipred_idc                    = pps->weighted_bipred_idc;
    info->frame_mbs_only_flag                    = sps->frame_mbs_only_flag;
    info->transform_8x8_mode_flag                = pps->transform_8x8_mode;
    info->chroma_qp_index_offset                 = pps->chroma_qp_index_offset[0];
    info->second_chroma_qp_index_offset          = pps->chroma_qp_index_offset[1];
    info->pic_init_qp_minus26                    = pps->init_qp - 26;
    info->num_ref_idx_l0_active_minus1           = pps->ref_count[0] - 1;
    info->num_ref_idx_l1_active_minus1           = pps->ref_count[1] - 1;
    info->log2_max_frame_num_minus4              = sps->log2_max_frame_num - 4;
    info->pic_order_cnt_type                     = sps->poc_type;
    info->log2_max_pic_order_cnt_lsb_minus4      = sps->poc_type ? 0 : sps->log2_max_poc_lsb - 4;
    info->delta_pic_order_always_zero_flag       = sps->delta_pic_order_always_zero_flag;
    info->direct_8x8_inference_flag              = sps->direct_8x8_inference_flag;
#ifdef VDP_DECODER_PROFILE_H264_HIGH_444_PREDICTIVE
    info2->qpprime_y_zero_transform_bypass_flag  = sps->transform_bypass;
    info2->separate_colour_plane_flag            = sps->residual_color_transform_flag;
#endif
    info->entropy_coding_mode_flag               = pps->cabac;
    info->pic_order_present_flag                 = pps->pic_order_present;
    info->deblocking_filter_control_present_flag = pps->deblocking_filter_parameters_present;
    info->redundant_pic_cnt_present_flag         = pps->redundant_pic_cnt_present;

    memcpy(info->scaling_lists_4x4, pps->scaling_matrix4,
           sizeof(info->scaling_lists_4x4));
    memcpy(info->scaling_lists_8x8[0], pps->scaling_matrix8[0],
           sizeof(info->scaling_lists_8x8[0]));
    memcpy(info->scaling_lists_8x8[1], pps->scaling_matrix8[3],
           sizeof(info->scaling_lists_8x8[1]));

    vdpau_h264_set_reference_frames(avctx);

    return ff_vdpau_common_start_frame(pic_ctx, buffer, size);
}
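All six examples finish by handing the per-picture context to ff_vdpau_common_start_frame. As a minimal sketch of what that shared helper amounts to (an assumption modeled on FFmpeg's libavcodec/vdpau.c, using a stand-in struct rather than the real private type), starting a frame simply resets the per-picture list of VDPAU bitstream buffers that later decode_slice calls append to:

#include <stdint.h>
#include <stdlib.h>
#include <vdpau/vdpau.h>

/* Stand-in for FFmpeg's private struct vdpau_picture_context (assumption). */
struct vdpau_picture_context_sketch {
    VdpBitstreamBuffer *bitstream_buffers;           /* slice data for VdpDecoderRender */
    int                 bitstream_buffers_used;
    unsigned int        bitstream_buffers_allocated;
};

static int common_start_frame_sketch(struct vdpau_picture_context_sketch *pic_ctx,
                                     const uint8_t *buffer, uint32_t size)
{
    (void)buffer; (void)size;   /* slice data is recorded later, not here */
    pic_ctx->bitstream_buffers_allocated = 0;
    pic_ctx->bitstream_buffers_used      = 0;
    pic_ctx->bitstream_buffers           = NULL;
    return 0;
}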
Example #2
static int vdpau_mpeg4_start_frame(AVCodecContext *avctx,
                                   const uint8_t *buffer, uint32_t size)
{
    Mpeg4DecContext *ctx = avctx->priv_data;
    MpegEncContext * const s = &ctx->m;
    Picture *pic             = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    VdpPictureInfoMPEG4Part2 *info = &pic_ctx->info.mpeg4;
    VdpVideoSurface ref;
    int i;

    /* fill VdpPictureInfoMPEG4Part2 struct */
    info->forward_reference  = VDP_INVALID_HANDLE;
    info->backward_reference = VDP_INVALID_HANDLE;
    info->vop_coding_type    = 0;

    switch (s->pict_type) {
    case AV_PICTURE_TYPE_B:
        ref = ff_vdpau_get_surface_id(s->next_picture.f);
        assert(ref != VDP_INVALID_HANDLE);
        info->backward_reference = ref;
        info->vop_coding_type    = 2;
        /* fall-through */
    case AV_PICTURE_TYPE_P:
        ref = ff_vdpau_get_surface_id(s->last_picture.f);
        assert(ref != VDP_INVALID_HANDLE);
        info->forward_reference  = ref;
    }

    info->trd[0]                            = s->pp_time;
    info->trb[0]                            = s->pb_time;
    info->trd[1]                            = s->pp_field_time >> 1;
    info->trb[1]                            = s->pb_field_time >> 1;
    info->vop_time_increment_resolution     = s->avctx->framerate.num;
    info->vop_fcode_forward                 = s->f_code;
    info->vop_fcode_backward                = s->b_code;
    info->resync_marker_disable             = !ctx->resync_marker;
    info->interlaced                        = !s->progressive_sequence;
    info->quant_type                        = s->mpeg_quant;
    info->quarter_sample                    = s->quarter_sample;
    info->short_video_header                = avctx->codec->id == AV_CODEC_ID_H263;
    info->rounding_control                  = s->no_rounding;
    info->alternate_vertical_scan_flag      = s->alternate_scan;
    info->top_field_first                   = s->top_field_first;
    for (i = 0; i < 64; ++i) {
        info->intra_quantizer_matrix[i]     = s->intra_matrix[i];
        info->non_intra_quantizer_matrix[i] = s->inter_matrix[i];
    }

    ff_vdpau_common_start_frame(pic_ctx, buffer, size);
    return ff_vdpau_add_buffer(pic_ctx, buffer, size);
}
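Unlike the H.264 example, this MPEG-4 variant also records the whole input buffer right away via ff_vdpau_add_buffer. A sketch of what appending a bitstream buffer involves (again an assumption modeled on FFmpeg's vdpau.c, reusing the stand-in struct from the sketch above; the real helper grows the array with av_fast_realloc):

#include <stdlib.h>
#include <vdpau/vdpau.h>

static int add_buffer_sketch(struct vdpau_picture_context_sketch *pic_ctx,
                             const uint8_t *buf, uint32_t size)
{
    /* Grow the buffer array by one entry. */
    VdpBitstreamBuffer *buffers =
        realloc(pic_ctx->bitstream_buffers,
                (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
    if (!buffers)
        return -1;                       /* the real helper returns AVERROR(ENOMEM) */

    pic_ctx->bitstream_buffers = buffers;
    buffers += pic_ctx->bitstream_buffers_used++;

    buffers->struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    buffers->bitstream       = buf;
    buffers->bitstream_bytes = size;
    return 0;
}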
Example #3
static int vdpau_mpeg_start_frame(AVCodecContext *avctx,
                                  const uint8_t *buffer, uint32_t size)
{
    MpegEncContext * const s = avctx->priv_data;
    Picture *pic             = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    VdpPictureInfoMPEG1Or2 *info = &pic_ctx->info.mpeg;
    VdpVideoSurface ref;
    int i;

    /* fill VdpPictureInfoMPEG1Or2 struct */
    info->forward_reference  = VDP_INVALID_HANDLE;
    info->backward_reference = VDP_INVALID_HANDLE;

    switch (s->pict_type) {
    case AV_PICTURE_TYPE_B:
        ref = ff_vdpau_get_surface_id(&s->next_picture.f);
        assert(ref != VDP_INVALID_HANDLE);
        info->backward_reference = ref;
        /* fall through to forward prediction */
    case AV_PICTURE_TYPE_P:
        ref = ff_vdpau_get_surface_id(&s->last_picture.f);
        info->forward_reference  = ref;
    }

    info->slice_count                = 0;
    info->picture_structure          = s->picture_structure;
    info->picture_coding_type        = s->pict_type;
    info->intra_dc_precision         = s->intra_dc_precision;
    info->frame_pred_frame_dct       = s->frame_pred_frame_dct;
    info->concealment_motion_vectors = s->concealment_motion_vectors;
    info->intra_vlc_format           = s->intra_vlc_format;
    info->alternate_scan             = s->alternate_scan;
    info->q_scale_type               = s->q_scale_type;
    info->top_field_first            = s->top_field_first;
    // Both for MPEG-1 only, zero for MPEG-2:
    info->full_pel_forward_vector    = s->full_pel[0];
    info->full_pel_backward_vector   = s->full_pel[1];
    // For MPEG-1 fill both horizontal & vertical:
    info->f_code[0][0]               = s->mpeg_f_code[0][0];
    info->f_code[0][1]               = s->mpeg_f_code[0][1];
    info->f_code[1][0]               = s->mpeg_f_code[1][0];
    info->f_code[1][1]               = s->mpeg_f_code[1][1];
    for (i = 0; i < 64; ++i) {
        info->intra_quantizer_matrix[i]     = s->intra_matrix[i];
        info->non_intra_quantizer_matrix[i] = s->inter_matrix[i];
    }

    return ff_vdpau_common_start_frame(pic_ctx, buffer, size);
}
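ff_vdpau_get_surface_id is used throughout the reference handling above. A plausible sketch of it (an assumption: for AV_PIX_FMT_VDPAU frames, FFmpeg carries the VdpVideoSurface handle in the frame's data[3] plane pointer):

#include <stdint.h>
#include <libavutil/frame.h>

static uintptr_t get_surface_id_sketch(const AVFrame *frame)
{
    /* For VDPAU frames, data[3] holds the VdpVideoSurface handle. */
    return (uintptr_t)frame->data[3];
}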
Example #4
static int vdpau_h264_start_frame(AVCodecContext *avctx,
                                  const uint8_t *buffer, uint32_t size)
{
    H264Context * const h = avctx->priv_data;
    AVVDPAUContext *hwctx = avctx->hwaccel_context;
    MpegEncContext * const s = &h->s;
    VdpPictureInfoH264 *info = &hwctx->info.h264;
    Picture *pic = s->current_picture_ptr;

    /* init VdpPictureInfoH264 */
    info->slice_count                            = 0;
    info->field_order_cnt[0]                     = h264_foc(pic->field_poc[0]);
    info->field_order_cnt[1]                     = h264_foc(pic->field_poc[1]);
    info->is_reference                           = h->nal_ref_idc != 0;
    info->frame_num                              = h->frame_num;
    info->field_pic_flag                         = s->picture_structure != PICT_FRAME;
    info->bottom_field_flag                      = s->picture_structure == PICT_BOTTOM_FIELD;
    info->num_ref_frames                         = h->sps.ref_frame_count;
    info->mb_adaptive_frame_field_flag           = h->sps.mb_aff && !info->field_pic_flag;
    info->constrained_intra_pred_flag            = h->pps.constrained_intra_pred;
    info->weighted_pred_flag                     = h->pps.weighted_pred;
    info->weighted_bipred_idc                    = h->pps.weighted_bipred_idc;
    info->frame_mbs_only_flag                    = h->sps.frame_mbs_only_flag;
    info->transform_8x8_mode_flag                = h->pps.transform_8x8_mode;
    info->chroma_qp_index_offset                 = h->pps.chroma_qp_index_offset[0];
    info->second_chroma_qp_index_offset          = h->pps.chroma_qp_index_offset[1];
    info->pic_init_qp_minus26                    = h->pps.init_qp - 26;
    info->num_ref_idx_l0_active_minus1           = h->pps.ref_count[0] - 1;
    info->num_ref_idx_l1_active_minus1           = h->pps.ref_count[1] - 1;
    info->log2_max_frame_num_minus4              = h->sps.log2_max_frame_num - 4;
    info->pic_order_cnt_type                     = h->sps.poc_type;
    info->log2_max_pic_order_cnt_lsb_minus4      = h->sps.poc_type ? 0 : h->sps.log2_max_poc_lsb - 4;
    info->delta_pic_order_always_zero_flag       = h->sps.delta_pic_order_always_zero_flag;
    info->direct_8x8_inference_flag              = h->sps.direct_8x8_inference_flag;
    info->entropy_coding_mode_flag               = h->pps.cabac;
    info->pic_order_present_flag                 = h->pps.pic_order_present;
    info->deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
    info->redundant_pic_cnt_present_flag         = h->pps.redundant_pic_cnt_present;

    memcpy(info->scaling_lists_4x4, h->pps.scaling_matrix4,
           sizeof(info->scaling_lists_4x4));
    memcpy(info->scaling_lists_8x8[0], h->pps.scaling_matrix8[0],
           sizeof(info->scaling_lists_8x8[0]));
    memcpy(info->scaling_lists_8x8[1], h->pps.scaling_matrix8[3],
           sizeof(info->scaling_lists_8x8[1]));

    vdpau_h264_set_reference_frames(avctx);

    return ff_vdpau_common_start_frame(avctx, buffer, size);
}
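Both H.264 variants filter the picture's POC values through h264_foc before storing them in field_order_cnt. A minimal sketch of that helper (an assumption: field_poc[] uses INT_MAX as an "absent field" sentinel, which should not be passed to VDPAU verbatim):

#include <limits.h>
#include <stdint.h>

static int32_t h264_foc_sketch(int foc)
{
    /* Map the "field not present" sentinel to 0 so VDPAU sees a sane value. */
    if (foc == INT_MAX)
        foc = 0;
    return foc;
}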
Example #5
static int vdpau_hevc_start_frame(AVCodecContext *avctx,
                                  const uint8_t *buffer, uint32_t size)
{
    HEVCContext *h = avctx->priv_data;
    HEVCFrame *pic = h->ref;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;

    VdpPictureInfoHEVC *info = &pic_ctx->info.hevc;

    const HEVCSPS *sps = h->ps.sps;
    const HEVCPPS *pps = h->ps.pps;
    const SliceHeader *sh = &h->sh;
    const ScalingList *sl = pps->scaling_list_data_present_flag ?
                            &pps->scaling_list : &sps->scaling_list;

    /* init VdpPictureInfoHEVC */

    /* SPS */
    info->chroma_format_idc = sps->chroma_format_idc;
    info->separate_colour_plane_flag = sps->separate_colour_plane_flag;
    info->pic_width_in_luma_samples = sps->width;
    info->pic_height_in_luma_samples = sps->height;
    info->bit_depth_luma_minus8 = sps->bit_depth - 8;
    info->bit_depth_chroma_minus8 = sps->bit_depth - 8;
    info->log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4;
    /* Provide the value corresponding to the nuh_temporal_id of the frame
       to be decoded. */
    info->sps_max_dec_pic_buffering_minus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_dec_pic_buffering - 1;
    info->log2_min_luma_coding_block_size_minus3 = sps->log2_min_cb_size - 3;
    info->log2_diff_max_min_luma_coding_block_size = sps->log2_diff_max_min_coding_block_size;
    info->log2_min_transform_block_size_minus2 = sps->log2_min_tb_size - 2;
    info->log2_diff_max_min_transform_block_size = sps->log2_max_trafo_size - sps->log2_min_tb_size;
    info->max_transform_hierarchy_depth_inter = sps->max_transform_hierarchy_depth_inter;
    info->max_transform_hierarchy_depth_intra = sps->max_transform_hierarchy_depth_intra;
    info->scaling_list_enabled_flag = sps->scaling_list_enable_flag;
    /* Scaling lists, in diagonal order, to be used for this frame. */
    for (size_t i = 0; i < 6; i++) {
        for (size_t j = 0; j < 16; j++) {
            /* Scaling List for 4x4 quantization matrix,
               indexed as ScalingList4x4[matrixId][i]. */
            uint8_t pos = 4 * ff_hevc_diag_scan4x4_y[j] + ff_hevc_diag_scan4x4_x[j];
            info->ScalingList4x4[i][j] = sl->sl[0][i][pos];
        }
        for (size_t j = 0; j < 64; j++) {
            uint8_t pos = 8 * ff_hevc_diag_scan8x8_y[j] + ff_hevc_diag_scan8x8_x[j];
            /* Scaling List for 8x8 quantization matrix,
               indexed as ScalingList8x8[matrixId][i]. */
            info->ScalingList8x8[i][j] = sl->sl[1][i][pos];
            /* Scaling List for 16x16 quantization matrix,
               indexed as ScalingList16x16[matrixId][i]. */
            info->ScalingList16x16[i][j] = sl->sl[2][i][pos];
            if (i < 2) {
                /* Scaling List for 32x32 quantization matrix,
                   indexed as ScalingList32x32[matrixId][i]. */
                info->ScalingList32x32[i][j] = sl->sl[3][i * 3][pos];
            }
        }
        /* Scaling List DC Coefficients for 16x16,
           indexed as ScalingListDCCoeff16x16[matrixId]. */
        info->ScalingListDCCoeff16x16[i] = sl->sl_dc[0][i];
        if (i < 2) {
            /* Scaling List DC Coefficients for 32x32,
               indexed as ScalingListDCCoeff32x32[matrixId]. */
            info->ScalingListDCCoeff32x32[i] = sl->sl_dc[1][i * 3];
        }
    }
    info->amp_enabled_flag = sps->amp_enabled_flag;
    info->sample_adaptive_offset_enabled_flag = sps->sao_enabled;
    info->pcm_enabled_flag = sps->pcm_enabled_flag;
    if (info->pcm_enabled_flag) {
        /* Only needs to be set if pcm_enabled_flag is set. Ignored otherwise. */
        info->pcm_sample_bit_depth_luma_minus1 = sps->pcm.bit_depth - 1;
        /* Only needs to be set if pcm_enabled_flag is set. Ignored otherwise. */
        info->pcm_sample_bit_depth_chroma_minus1 = sps->pcm.bit_depth_chroma - 1;
        /* Only needs to be set if pcm_enabled_flag is set. Ignored otherwise. */
        info->log2_min_pcm_luma_coding_block_size_minus3 = sps->pcm.log2_min_pcm_cb_size - 3;
        /* Only needs to be set if pcm_enabled_flag is set. Ignored otherwise. */
        info->log2_diff_max_min_pcm_luma_coding_block_size = sps->pcm.log2_max_pcm_cb_size - sps->pcm.log2_min_pcm_cb_size;
        /* Only needs to be set if pcm_enabled_flag is set. Ignored otherwise. */
        info->pcm_loop_filter_disabled_flag = sps->pcm.loop_filter_disable_flag;
    }
    /* Per spec, when zero, assume short_term_ref_pic_set_sps_flag
       is also zero. */
    info->num_short_term_ref_pic_sets = sps->nb_st_rps;
    info->long_term_ref_pics_present_flag = sps->long_term_ref_pics_present_flag;
    /* Only needed if long_term_ref_pics_present_flag is set. Ignored
       otherwise. */
    info->num_long_term_ref_pics_sps = sps->num_long_term_ref_pics_sps;
    info->sps_temporal_mvp_enabled_flag = sps->sps_temporal_mvp_enabled_flag;
    info->strong_intra_smoothing_enabled_flag = sps->sps_strong_intra_smoothing_enable_flag;

    /* Copy the HEVC Picture Parameter Set bitstream fields. */
    info->dependent_slice_segments_enabled_flag = pps->dependent_slice_segments_enabled_flag;
    info->output_flag_present_flag = pps->output_flag_present_flag;
    info->num_extra_slice_header_bits = pps->num_extra_slice_header_bits;
    info->sign_data_hiding_enabled_flag = pps->sign_data_hiding_flag;
    info->cabac_init_present_flag = pps->cabac_init_present_flag;
    info->num_ref_idx_l0_default_active_minus1 = pps->num_ref_idx_l0_default_active - 1;
    info->num_ref_idx_l1_default_active_minus1 = pps->num_ref_idx_l1_default_active - 1;
    info->init_qp_minus26 = pps->pic_init_qp_minus26;
    info->constrained_intra_pred_flag = pps->constrained_intra_pred_flag;
    info->transform_skip_enabled_flag = pps->transform_skip_enabled_flag;
    info->cu_qp_delta_enabled_flag = pps->cu_qp_delta_enabled_flag;
    /* Only needed if cu_qp_delta_enabled_flag is set. Ignored otherwise. */
    info->diff_cu_qp_delta_depth = pps->diff_cu_qp_delta_depth;
    info->pps_cb_qp_offset = pps->cb_qp_offset;
    info->pps_cr_qp_offset = pps->cr_qp_offset;
    info->pps_slice_chroma_qp_offsets_present_flag = pps->pic_slice_level_chroma_qp_offsets_present_flag;
    info->weighted_pred_flag = pps->weighted_pred_flag;
    info->weighted_bipred_flag = pps->weighted_bipred_flag;
    info->transquant_bypass_enabled_flag = pps->transquant_bypass_enable_flag;
    info->tiles_enabled_flag = pps->tiles_enabled_flag;
    info->entropy_coding_sync_enabled_flag = pps->entropy_coding_sync_enabled_flag;
    if (info->tiles_enabled_flag) {
        /* Only valid if tiles_enabled_flag is set. Ignored otherwise. */
        info->num_tile_columns_minus1 = pps->num_tile_columns - 1;
        /* Only valid if tiles_enabled_flag is set. Ignored otherwise. */
        info->num_tile_rows_minus1 = pps->num_tile_rows - 1;
        /* Only valid if tiles_enabled_flag is set. Ignored otherwise. */
        info->uniform_spacing_flag = pps->uniform_spacing_flag;
        /* Only need to set 0..num_tile_columns_minus1. The struct
           definition reserves up to the maximum of 20. Invalid values are
           ignored. */
        for (ssize_t i = 0; i < pps->num_tile_columns; i++) {
            info->column_width_minus1[i] = pps->column_width[i] - 1;
        }
        /* Only need to set 0..num_tile_rows_minus1. The struct
           definition reserves up to the maximum of 22. Invalid values are
           ignored.*/
        for (ssize_t i = 0; i < pps->num_tile_rows; i++) {
            info->row_height_minus1[i] = pps->row_height[i] - 1;
        }
        /* Only needed if tiles_enabled_flag is set. Invalid values are
           ignored. */
        info->loop_filter_across_tiles_enabled_flag = pps->loop_filter_across_tiles_enabled_flag;
    }
    info->pps_loop_filter_across_slices_enabled_flag = pps->seq_loop_filter_across_slices_enabled_flag;
    info->deblocking_filter_control_present_flag = pps->deblocking_filter_control_present_flag;
    /* Only valid if deblocking_filter_control_present_flag is set. Ignored
       otherwise. */
    info->deblocking_filter_override_enabled_flag = pps->deblocking_filter_override_enabled_flag;
    /* Only valid if deblocking_filter_control_present_flag is set. Ignored
       otherwise. */
    info->pps_deblocking_filter_disabled_flag = pps->disable_dbf;
    /* Only valid if deblocking_filter_control_present_flag is set and
       pps_deblocking_filter_disabled_flag is not set. Ignored otherwise.*/
    info->pps_beta_offset_div2 = pps->beta_offset / 2;
    /* Only valid if deblocking_filter_control_present_flag is set and
       pps_deblocking_filter_disabled_flag is not set. Ignored otherwise. */
    info->pps_tc_offset_div2 = pps->tc_offset / 2;
    info->lists_modification_present_flag = pps->lists_modification_present_flag;
    info->log2_parallel_merge_level_minus2 = pps->log2_parallel_merge_level - 2;
    info->slice_segment_header_extension_present_flag = pps->slice_header_extension_present_flag;

    /* Set to 1 if nal_unit_type is equal to IDR_W_RADL or IDR_N_LP.
       Set to zero otherwise. */
    info->IDRPicFlag = IS_IDR(h);
    /* Set to 1 if nal_unit_type in the range of BLA_W_LP to
       RSV_IRAP_VCL23, inclusive. Set to zero otherwise.*/
    info->RAPPicFlag = IS_IRAP(h);
    /* See section 7.4.7.1 of the specification. */
    info->CurrRpsIdx = sps->nb_st_rps;
    if (sh->short_term_ref_pic_set_sps_flag == 1) {
        for (size_t i = 0; i < sps->nb_st_rps; i++) {
            if (sh->short_term_rps == &sps->st_rps[i]) {
                info->CurrRpsIdx = i;
                break;
            }
        }
    }
    /* See section 7.4.7.2 of the specification. */
    info->NumPocTotalCurr = ff_hevc_frame_nb_refs(h);
    if (sh->short_term_ref_pic_set_sps_flag == 0 && sh->short_term_rps) {
        /* Corresponds to specification field, NumDeltaPocs[RefRpsIdx].
           Only applicable when short_term_ref_pic_set_sps_flag == 0.
           Implementations will ignore this value in other cases. See 7.4.8. */
        info->NumDeltaPocsOfRefRpsIdx = sh->short_term_rps->rps_idx_num_delta_pocs;
    }
    /* Section 7.6.3.1 of the H.265/HEVC Specification defines the syntax of
       the slice_segment_header. This header contains information that
       some VDPAU implementations may choose to skip. The VDPAU API
       requires client applications to track the number of bits used in the
       slice header for structures associated with short term and long term
       reference pictures. First, VDPAU requires the number of bits used by
       the short_term_ref_pic_set array in the slice_segment_header. */
    info->NumShortTermPictureSliceHeaderBits = sh->short_term_ref_pic_set_size;
    /* Second, VDPAU requires the number of bits used for long term reference
       pictures in the slice_segment_header. This is equal to the number
       of bits used for the contents of the block beginning with
       "if(long_term_ref_pics_present_flag)". */
    info->NumLongTermPictureSliceHeaderBits = sh->long_term_ref_pic_set_size;

    /* The value of PicOrderCntVal of the picture in the access unit
       containing the SEI message. The picture being decoded. */
    info->CurrPicOrderCntVal = h->poc;

    /* Slice Decoding Process - Reference Picture Sets */
    for (size_t i = 0; i < 16; i++) {
        info->RefPics[i] = VDP_INVALID_HANDLE;
        info->PicOrderCntVal[i] = 0;
        info->IsLongTerm[i] = 0;
    }
    for (size_t i = 0, j = 0; i < FF_ARRAY_ELEMS(h->DPB); i++) {
        const HEVCFrame *frame = &h->DPB[i];
        if (frame != h->ref && (frame->flags & (HEVC_FRAME_FLAG_LONG_REF |
                                                HEVC_FRAME_FLAG_SHORT_REF))) {
            if (j > 15) {
                av_log(avctx, AV_LOG_WARNING,
                     "VDPAU only supports up to 16 references in the DPB. "
                     "This frame may not be decoded correctly.\n");
                break;
            }
            /* Array of video reference surfaces.
               Set any unused positions to VDP_INVALID_HANDLE. */
            info->RefPics[j] = ff_vdpau_get_surface_id(frame->frame);
            /* Array of picture order counts. These correspond to positions
               in the RefPics array. */
            info->PicOrderCntVal[j] = frame->poc;
            /* Array used to specify whether a particular RefPic is
               a long term reference. A value of "1" indicates a long-term
               reference. */
            // XXX: Setting this caused glitches in the NVIDIA implementation.
            // Always setting it to zero produces correct results.
            //info->IsLongTerm[j] = frame->flags & HEVC_FRAME_FLAG_LONG_REF;
            info->IsLongTerm[j] = 0;
            j++;
        }
    }
    /* Copy of specification field, see Section 8.3.2 of the
       H.265/HEVC Specification. */
    info->NumPocStCurrBefore = h->rps[ST_CURR_BEF].nb_refs;
    if (info->NumPocStCurrBefore > 8) {
        av_log(avctx, AV_LOG_WARNING,
             "VDPAU only supports up to 8 references in StCurrBefore. "
             "This frame may not be decoded correctly.\n");
        info->NumPocStCurrBefore = 8;
    }
    /* Copy of specification field, see Section 8.3.2 of the
       H.265/HEVC Specification. */
    info->NumPocStCurrAfter = h->rps[ST_CURR_AFT].nb_refs;
    if (info->NumPocStCurrAfter > 8) {
        av_log(avctx, AV_LOG_WARNING,
             "VDPAU only supports up to 8 references in StCurrAfter. "
             "This frame may not be decoded correctly.\n");
        info->NumPocStCurrAfter = 8;
    }
    /* Copy of specification field, see Section 8.3.2 of the
       H.265/HEVC Specification. */
    info->NumPocLtCurr = h->rps[LT_CURR].nb_refs;
    if (info->NumPocLtCurr > 8) {
        av_log(avctx, AV_LOG_WARNING,
             "VDPAU only supports up to 8 references in LtCurr. "
             "This frame may not be decoded correctly.\n");
        info->NumPocLtCurr = 8;
    }
    /* Reference Picture Set list, one of the short-term RPS. These
       correspond to positions in the RefPics array. */
    for (ssize_t i = 0, j = 0; i < h->rps[ST_CURR_BEF].nb_refs; i++) {
        HEVCFrame *frame = h->rps[ST_CURR_BEF].ref[i];
        if (frame) {
            uint8_t found = 0;
            uintptr_t id = ff_vdpau_get_surface_id(frame->frame);
            for (size_t k = 0; k < 16; k++) {
                if (id == info->RefPics[k]) {
                    info->RefPicSetStCurrBefore[j] = k;
                    j++;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                av_log(avctx, AV_LOG_WARNING, "missing surface: %p\n",
                       (void *)id);
            }
        } else {
            av_log(avctx, AV_LOG_WARNING, "missing STR Before frame: %zd\n", i);
        }
    }
    /* Reference Picture Set list, one of the short-term RPS. These
       correspond to positions in the RefPics array. */
    for (ssize_t i = 0, j = 0; i < h->rps[ST_CURR_AFT].nb_refs; i++) {
        HEVCFrame *frame = h->rps[ST_CURR_AFT].ref[i];
        if (frame) {
            uint8_t found = 0;
            uintptr_t id = ff_vdpau_get_surface_id(frame->frame);
            for (size_t k = 0; k < 16; k++) {
                if (id == info->RefPics[k]) {
                    info->RefPicSetStCurrAfter[j] = k;
                    j++;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                av_log(avctx, AV_LOG_WARNING, "missing surface: %p\n",
                       (void *)id);
            }
        } else {
            av_log(avctx, AV_LOG_WARNING, "missing STR After frame: %zd\n", i);
        }
    }
    /* Reference Picture Set list, one of the long-term RPS. These
       correspond to positions in the RefPics array. */
    for (ssize_t i = 0, j = 0; i < h->rps[LT_CURR].nb_refs; i++) {
        HEVCFrame *frame = h->rps[LT_CURR].ref[i];
        if (frame) {
            uint8_t found = 0;
            uintptr_t id = ff_vdpau_get_surface_id(frame->frame);
            for (size_t k = 0; k < 16; k++) {
                if (id == info->RefPics[k]) {
                    info->RefPicSetLtCurr[j] = k;
                    j++;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                av_log(avctx, AV_LOG_WARNING, "missing surface: %p\n",
                       (void *)id);
            }
        } else {
            av_log(avctx, AV_LOG_WARNING, "missing LTR frame: %zd\n", i);
        }
    }

    return ff_vdpau_common_start_frame(pic_ctx, buffer, size);
}
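The three reference-picture-set loops at the end repeat the same linear search for a surface's slot in RefPics. A small hypothetical helper (the name and the factoring are illustrative only, not part of the original function) isolates that pattern:

#include <stdint.h>
#include <vdpau/vdpau.h>

/* Return the index of surface `id` in info->RefPics[], or -1 if it is not
   among the 16 tracked references. */
static int find_ref_pic_index_sketch(const VdpPictureInfoHEVC *info, uintptr_t id)
{
    for (int k = 0; k < 16; k++)
        if (info->RefPics[k] == id)
            return k;
    return -1;
}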
Example #6
static int vdpau_vc1_start_frame(AVCodecContext *avctx,
                                 const uint8_t *buffer, uint32_t size)
{
    VC1Context * const v  = avctx->priv_data;
    MpegEncContext * const s = &v->s;
    Picture *pic          = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    VdpPictureInfoVC1 *info = &pic_ctx->info.vc1;
    VdpVideoSurface ref;

    /* fill VdpPictureInfoVC1 struct */
    info->forward_reference  = VDP_INVALID_HANDLE;
    info->backward_reference = VDP_INVALID_HANDLE;

    switch (s->pict_type) {
    case AV_PICTURE_TYPE_B:
        if (s->next_picture_ptr) {
            ref = ff_vdpau_get_surface_id(s->next_picture.f);
            assert(ref != VDP_INVALID_HANDLE);
            info->backward_reference = ref;
        }
        /* fall-through */
    case AV_PICTURE_TYPE_P:
        if (s->last_picture_ptr) {
            ref = ff_vdpau_get_surface_id(s->last_picture.f);
            assert(ref != VDP_INVALID_HANDLE);
            info->forward_reference  = ref;
        }
    }

    info->slice_count       = 0;
    if (v->bi_type)
        info->picture_type  = 4;
    else
        info->picture_type  = s->pict_type - 1 + s->pict_type / 3;

    info->frame_coding_mode = v->fcm ? (v->fcm + 1) : 0;
    info->postprocflag      = v->postprocflag;
    info->pulldown          = v->broadcast;
    info->interlace         = v->interlace;
    info->tfcntrflag        = v->tfcntrflag;
    info->finterpflag       = v->finterpflag;
    info->psf               = v->psf;
    info->dquant            = v->dquant;
    info->panscan_flag      = v->panscanflag;
    info->refdist_flag      = v->refdist_flag;
    info->quantizer         = v->quantizer_mode;
    info->extended_mv       = v->extended_mv;
    info->extended_dmv      = v->extended_dmv;
    info->overlap           = v->overlap;
    info->vstransform       = v->vstransform;
    info->loopfilter        = v->s.loop_filter;
    info->fastuvmc          = v->fastuvmc;
    info->range_mapy_flag   = v->range_mapy_flag;
    info->range_mapy        = v->range_mapy;
    info->range_mapuv_flag  = v->range_mapuv_flag;
    info->range_mapuv       = v->range_mapuv;
    /* Specific to simple/main profile only */
    info->multires          = v->multires;
    info->syncmarker        = v->resync_marker;
    info->rangered          = v->rangered | (v->rangeredfrm << 1);
    info->maxbframes        = v->s.max_b_frames;
    info->deblockEnable     = v->postprocflag & 1;
    info->pquant            = v->pq;

    return ff_vdpau_common_start_frame(pic_ctx, buffer, size);
}
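None of these callbacks run unless the application has opened the decoder with a VDPAU device. A sketch of that application-side setup, using FFmpeg's public hwdevice API (an assumption about typical usage, not code taken from the examples above):

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

static enum AVPixelFormat pick_vdpau(AVCodecContext *avctx,
                                     const enum AVPixelFormat *fmts)
{
    for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++)
        if (*p == AV_PIX_FMT_VDPAU)
            return *p;          /* route decoding through the VDPAU hwaccel */
    return fmts[0];             /* no VDPAU offered: fall back to software */
}

static int enable_vdpau_sketch(AVCodecContext *avctx)
{
    AVBufferRef *device = NULL;
    int ret = av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_VDPAU,
                                     NULL /* default display */, NULL, 0);
    if (ret < 0)
        return ret;
    avctx->get_format    = pick_vdpau;
    avctx->hw_device_ctx = av_buffer_ref(device);  /* codec keeps its own reference */
    av_buffer_unref(&device);
    return avctx->hw_device_ctx ? 0 : AVERROR(ENOMEM);
}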