Example #1
static int ffmpeg_GetFrameBuf( struct AVCodecContext *p_context,
                               AVFrame *p_ff_pic )
{
    decoder_t *p_dec = (decoder_t *)p_context->opaque;
    decoder_sys_t *p_sys = p_dec->p_sys;

    /* */
    p_ff_pic->opaque = NULL;
#if ! LIBAVCODEC_VERSION_CHECK(54, 34, 0, 79, 101)
    p_ff_pic->pkt_pts = p_context->pkt ? p_context->pkt->pts : AV_NOPTS_VALUE;
#endif
#if LIBAVCODEC_VERSION_MAJOR < 54
    p_ff_pic->age = 256*256*256*64;
#endif

    if( p_sys->p_va )
        return ffmpeg_va_GetFrameBuf(p_context, p_ff_pic);

    if( !p_sys->b_direct_rendering )
        return avcodec_default_get_buffer( p_context, p_ff_pic );

    wait_mt( p_sys );
    /* Some codecs set pix_fmt only after the 1st frame has been decoded,
     * so we need to check for direct rendering again. */

    picture_t *p_pic = ffmpeg_dr_GetFrameBuf(p_context);
    if (!p_pic) {
        if( p_sys->i_direct_rendering_used != 0 )
        {
            msg_Warn( p_dec, "disabling direct rendering" );
            p_sys->i_direct_rendering_used = 0;
        }

        post_mt( p_sys );
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }

    if( p_sys->i_direct_rendering_used != 1 ) {
        msg_Dbg( p_dec, "using direct rendering" );
        p_sys->i_direct_rendering_used = 1;
    }

    p_context->draw_horiz_band = NULL;
    post_mt( p_sys );

    p_ff_pic->opaque = (void*)p_pic;
    p_ff_pic->type = FF_BUFFER_TYPE_USER;
    p_ff_pic->data[0] = p_pic->p[0].p_pixels;
    p_ff_pic->data[1] = p_pic->p[1].p_pixels;
    p_ff_pic->data[2] = p_pic->p[2].p_pixels;
    p_ff_pic->data[3] = NULL; /* alpha channel but I'm not sure */

    p_ff_pic->linesize[0] = p_pic->p[0].i_pitch;
    p_ff_pic->linesize[1] = p_pic->p[1].i_pitch;
    p_ff_pic->linesize[2] = p_pic->p[2].i_pitch;
    p_ff_pic->linesize[3] = 0;

    return 0;
}
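For context, here is a minimal sketch of how a callback like the one above is typically installed on the codec context with the pre-get_buffer2 API (libavcodec < 55). The helper name install_dr_callbacks and the release callback ffmpeg_ReleaseFrameBuf are assumptions for illustration; they are not part of the example above.

/* Sketch (assumption): wiring the old direct-rendering callbacks when the
 * codec context is configured.  ffmpeg_ReleaseFrameBuf is an assumed helper
 * that undoes what ffmpeg_GetFrameBuf() set up. */
static void install_dr_callbacks( decoder_t *p_dec, AVCodecContext *p_context )
{
    p_context->opaque = p_dec;                 /* read back in ffmpeg_GetFrameBuf() */
    p_context->get_buffer = ffmpeg_GetFrameBuf;
    p_context->reget_buffer = avcodec_default_reget_buffer;
    p_context->release_buffer = ffmpeg_ReleaseFrameBuf;   /* assumed helper */
}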
Example #2
/**
 * Callback used by libavcodec to get a frame buffer.
 *
 * It is used for direct rendering as well as to get the right PTS for each
 * decoded picture (even in indirect rendering mode).
 */
static int lavc_GetFrame(struct AVCodecContext *ctx, AVFrame *frame, int flags)
{
    decoder_t *dec = ctx->opaque;
    decoder_sys_t *sys = dec->p_sys;
    picture_t *pic;

    for (unsigned i = 0; i < AV_NUM_DATA_POINTERS; i++)
    {
        frame->data[i] = NULL;
        frame->linesize[i] = 0;
        frame->buf[i] = NULL;
    }

    if (sys->p_va != NULL)
        return lavc_va_GetFrame(ctx, frame, flags);

    frame->opaque = NULL;
    if (!sys->b_direct_rendering)
        return avcodec_default_get_buffer2(ctx, frame, flags);

    /* Some codecs set pix_fmt only after the 1st frame has been decoded,
     * so we need to check for direct rendering again. */
    wait_mt(sys);
    pic = lavc_dr_GetFrame(ctx, frame, flags);
    if (pic == NULL)
    {
        if (sys->i_direct_rendering_used != 0)
        {
            msg_Warn(dec, "disabling direct rendering");
            sys->i_direct_rendering_used = 0;
        }
        post_mt(sys);
        return avcodec_default_get_buffer2(ctx, frame, flags);
    }

    if (sys->i_direct_rendering_used != 1)
    {
        msg_Dbg(dec, "enabling direct rendering");
        sys->i_direct_rendering_used = 1;
    }
    post_mt(sys);

    frame->opaque = pic;
    static_assert(PICTURE_PLANE_MAX <= AV_NUM_DATA_POINTERS, "Oops!");
    for (unsigned i = 0; i < PICTURE_PLANE_MAX; i++)
    {
        frame->data[i] = pic->p[i].p_pixels;
        frame->linesize[i] = pic->p[i].i_pitch;
    }
    return 0;
}
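The get_buffer2 variant above is only reached if it has been registered on the AVCodecContext. A minimal sketch of that wiring, assuming it happens when the codec is opened; install_get_buffer2 is an illustrative name, not part of the example.

/* Sketch (assumption): hooking lavc_GetFrame into the get_buffer2 API
 * (libavcodec >= 55). */
static void install_get_buffer2( decoder_t *dec, AVCodecContext *ctx )
{
    ctx->opaque = dec;                /* read back as ctx->opaque in lavc_GetFrame() */
    ctx->get_buffer2 = lavc_GetFrame;
    ctx->refcounted_frames = true;    /* ask libavcodec for reference-counted frames */
}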
Example #3
File: video.c  Project: bobwxb/vlc
/**
 * Callback used by libavcodec to get a frame buffer.
 *
 * It is used for direct rendering as well as to get the right PTS for each
 * decoded picture (even in indirect rendering mode).
 */
static int lavc_GetFrame(struct AVCodecContext *ctx, AVFrame *frame, int flags)
{
    decoder_t *dec = ctx->opaque;
    decoder_sys_t *sys = dec->p_sys;
    picture_t *pic;

    for (unsigned i = 0; i < AV_NUM_DATA_POINTERS; i++)
    {
        frame->data[i] = NULL;
        frame->linesize[i] = 0;
        frame->buf[i] = NULL;
    }
    frame->opaque = NULL;

    wait_mt(sys);
    if (sys->p_va != NULL)
    {   /* TODO: Move this to get_format(). We are screwed if it fails here. */
        if (vlc_va_Setup(sys->p_va, ctx, &dec->fmt_out.video.i_chroma))
        {
            post_mt(sys);
            msg_Err(dec, "hardware acceleration setup failed");
            return -1;
        }
    }
    else if (!sys->b_direct_rendering)
    {
        post_mt(sys);
        return avcodec_default_get_buffer2(ctx, frame, flags);
    }

    /* The semaphore protects updates to fmt_out */
    pic = ffmpeg_NewPictBuf(dec, ctx);
    post_mt(sys);
    if (pic == NULL)
        return -1;

    if (sys->p_va != NULL)
        return lavc_va_GetFrame(ctx, frame, pic);

    /* Some codecs set pix_fmt only after the 1st frame has been decoded,
     * so we need to check for direct rendering again. */
    int ret = lavc_dr_GetFrame(ctx, frame, pic);
    if (ret)
        ret = avcodec_default_get_buffer2(ctx, frame, flags);
    return ret;
}
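The lavc_dr_GetFrame() helper called above is not shown. Below is a hypothetical sketch of what a direct-rendering get_buffer2 path has to do: wrap each plane of the VLC picture in an AVBufferRef (via av_buffer_create) so that libavcodec releases the picture when it drops its last reference to the frame. The names lavc_dr_GetFrame_sketch and lavc_ReleaseFrame are illustrative only, error cleanup is elided, and a real implementation must also satisfy libavcodec's stride and alignment requirements.

/* Hypothetical free callback: drop the reference taken with picture_Hold(). */
static void lavc_ReleaseFrame( void *opaque, uint8_t *data )
{
    picture_t *pic = opaque;
    (void) data;
    picture_Release( pic );
}

/* Hypothetical sketch of the direct-rendering plane setup. */
static int lavc_dr_GetFrame_sketch( AVFrame *frame, picture_t *pic )
{
    for( int i = 0; i < pic->i_planes; i++ )
    {
        frame->data[i] = pic->p[i].p_pixels;
        frame->linesize[i] = pic->p[i].i_pitch;
        frame->buf[i] = av_buffer_create( pic->p[i].p_pixels,
                                          pic->p[i].i_pitch * pic->p[i].i_lines,
                                          lavc_ReleaseFrame,
                                          picture_Hold( pic ), 0 );
        if( frame->buf[i] == NULL )
            return -1; /* caller falls back to avcodec_default_get_buffer2() */
    }
    return 0;
}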
Example #4
File: video.c  Project: bobwxb/vlc
/*****************************************************************************
 * EndVideo: decoder destruction
 *****************************************************************************
 * This function is called when the thread ends after a successful
 * initialization.
 *****************************************************************************/
void EndVideoDec( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    post_mt( p_sys );

    /* do not flush buffers if codec hasn't been opened (theora/vorbis/VC1) */
    if( p_sys->p_context->codec )
        avcodec_flush_buffers( p_sys->p_context );

    wait_mt( p_sys );

    ffmpeg_CloseCodec( p_dec );

    if( p_sys->p_va )
        vlc_va_Delete( p_sys->p_va, p_sys->p_context );

    vlc_sem_destroy( &p_sys->sem_mt );
}
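wait_mt() and post_mt() appear in every example. A sketch of what they are assumed to be here: thin wrappers around the sem_mt semaphore in decoder_sys_t, which serializes the decoder thread and the buffer callbacks when frame threading is enabled.

/* Sketch (assumption): the multithreading helpers used throughout. */
#define wait_mt( s ) vlc_sem_wait( &(s)->sem_mt )
#define post_mt( s ) vlc_sem_post( &(s)->sem_mt )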
Example #5
/*****************************************************************************
 * DecodeVideo: Called to decode one or more frames
 *****************************************************************************/
picture_t *DecodeVideo( decoder_t *p_dec, block_t **pp_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    AVCodecContext *p_context = p_sys->p_context;
    int b_drawpicture;
    int b_null_size = false;
    block_t *p_block;

    if( !pp_block || !*pp_block )
        return NULL;

    if( !p_context->extradata_size && p_dec->fmt_in.i_extra )
    {
        ffmpeg_InitCodec( p_dec );
        if( p_sys->b_delayed_open )
        {
            if( ffmpeg_OpenCodec( p_dec ) )
                msg_Err( p_dec, "cannot open codec (%s)", p_sys->psz_namecodec );
        }
    }

    p_block = *pp_block;
    if( p_sys->b_delayed_open )
    {
        block_Release( p_block );
        return NULL;
    }

    if( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED) )
    {
        p_sys->i_pts = VLC_TS_INVALID; /* To make sure we recover properly */

        p_sys->i_late_frames = 0;

        if( p_block->i_flags & BLOCK_FLAG_DISCONTINUITY )
            avcodec_flush_buffers( p_context );

        block_Release( p_block );
        return NULL;
    }

    if( p_block->i_flags & BLOCK_FLAG_PREROLL )
    {
        /* Do not care about late frames when prerolling
         * TODO avoid decoding of non reference frame
         * (ie all B except for H264 where it depends only on nal_ref_idc) */
        p_sys->i_late_frames = 0;
    }

    if( !p_dec->b_pace_control && (p_sys->i_late_frames > 0) &&
        (mdate() - p_sys->i_late_frames_start > INT64_C(5000000)) )
    {
        if( p_sys->i_pts > VLC_TS_INVALID )
        {
            msg_Err( p_dec, "more than 5 seconds of late video -> "
                     "dropping frame (computer too slow ?)" );
            p_sys->i_pts = VLC_TS_INVALID; /* To make sure we recover properly */
        }
        block_Release( p_block );
        p_sys->i_late_frames--;
        return NULL;
    }

    /* A good idea could be to decode all I pictures and then decide for the others */
    if( !p_dec->b_pace_control &&
        p_sys->b_hurry_up &&
        (p_sys->i_late_frames > 4) )
    {
        b_drawpicture = 0;
        if( p_sys->i_late_frames < 12 )
        {
            p_context->skip_frame =
                    (p_sys->i_skip_frame <= AVDISCARD_BIDIR) ?
                    AVDISCARD_BIDIR : p_sys->i_skip_frame;
        }
        else
        {
            /* picture too late, won't decode
             * but break picture until a new I, and for mpeg4 ...*/
            p_sys->i_late_frames--; /* needed, else it would never decrease */
            block_Release( p_block );
            return NULL;
        }
    }
    else
    {
        if( p_sys->b_hurry_up )
            p_context->skip_frame = p_sys->i_skip_frame;
        if( !(p_block->i_flags & BLOCK_FLAG_PREROLL) )
            b_drawpicture = 1;
        else
            b_drawpicture = 0;
    }

    if( p_context->width <= 0 || p_context->height <= 0 )
    {
        if( p_sys->b_hurry_up )
            p_context->skip_frame = p_sys->i_skip_frame;
        b_null_size = true;
    }
    else if( !b_drawpicture )
    {
        /* It creates broken picture
         * FIXME either our parser or ffmpeg is broken */
#if 0
        if( p_sys->b_hurry_up )
            p_context->skip_frame = __MAX( p_context->skip_frame,
                                                  AVDISCARD_NONREF );
#endif
    }

    /*
     * Do the actual decoding now */

    /* Don't forget that ffmpeg requires a few more bytes
     * than the real frame size */
    if( p_block->i_buffer > 0 )
    {
        p_sys->b_flush = ( p_block->i_flags & BLOCK_FLAG_END_OF_SEQUENCE ) != 0;

        p_block = block_Realloc( p_block, 0,
                            p_block->i_buffer + FF_INPUT_BUFFER_PADDING_SIZE );
        if( !p_block )
            return NULL;
        p_block->i_buffer -= FF_INPUT_BUFFER_PADDING_SIZE;
        *pp_block = p_block;
        memset( p_block->p_buffer + p_block->i_buffer, 0,
                FF_INPUT_BUFFER_PADDING_SIZE );
    }

    while( p_block->i_buffer > 0 || p_sys->b_flush )
    {
        int i_used, b_gotpicture;
        picture_t *p_pic;

        /* Set the PTS/DTS in the context reordered_opaque field */
        if( p_block->i_pts > VLC_TS_INVALID  )
            p_context->reordered_opaque = (p_block->i_pts << 1) | 0;
        else if( p_block->i_dts > VLC_TS_INVALID )
            p_context->reordered_opaque = (p_block->i_dts << 1) | 1;
        else
            p_context->reordered_opaque = INT64_MIN;
        p_sys->p_ff_pic->reordered_opaque = p_context->reordered_opaque;

        /* Make sure we don't reuse the same timestamps twice */
        p_block->i_pts =
        p_block->i_dts = VLC_TS_INVALID;

        post_mt( p_sys );

        i_used = avcodec_decode_video( p_context, p_sys->p_ff_pic,
                                       &b_gotpicture,
                                       p_block->i_buffer <= 0 && p_sys->b_flush ? NULL : p_block->p_buffer, p_block->i_buffer );

        if( b_null_size && !p_sys->b_flush &&
            p_context->width > 0 && p_context->height > 0 )
        {
            /* Reparse it to not drop the I frame */
            b_null_size = false;
            if( p_sys->b_hurry_up )
                p_context->skip_frame = p_sys->i_skip_frame;
            i_used = avcodec_decode_video( p_context, p_sys->p_ff_pic,
                                           &b_gotpicture, p_block->p_buffer,
                                           p_block->i_buffer );
        }
        wait_mt( p_sys );

        if( p_sys->b_flush )
            p_sys->b_first_frame = true;

        if( p_block->i_buffer <= 0 )
            p_sys->b_flush = false;

        if( i_used < 0 )
        {
            if( b_drawpicture )
                msg_Warn( p_dec, "cannot decode one frame (%zu bytes)",
                          p_block->i_buffer );
            block_Release( p_block );
            return NULL;
        }
        else if( i_used > p_block->i_buffer ||
                 p_context->thread_count > 1 )
        {
            i_used = p_block->i_buffer;
        }

        /* Consumed bytes */
        p_block->i_buffer -= i_used;
        p_block->p_buffer += i_used;

        /* Nothing to display */
        if( !b_gotpicture )
        {
            if( i_used == 0 ) break;
            continue;
        }

        /* Compute the PTS */
        mtime_t i_pts = VLC_TS_INVALID;
        if( p_sys->p_ff_pic->reordered_opaque != INT64_MIN )
        {
            mtime_t i_ts = p_sys->p_ff_pic->reordered_opaque >> 1;
            bool    b_dts = p_sys->p_ff_pic->reordered_opaque & 1;
            if( b_dts )
            {
                if( !p_context->has_b_frames ||
                    !p_sys->b_has_b_frames ||
                    !p_sys->p_ff_pic->reference ||
                    p_sys->i_pts <= VLC_TS_INVALID )
                    i_pts = i_ts;
            }
            else
            {
                i_pts = i_ts;
            }
        }
        if( i_pts <= VLC_TS_INVALID )
            i_pts = p_sys->i_pts;

        /* Interpolate the next PTS */
        if( i_pts > VLC_TS_INVALID )
            p_sys->i_pts = i_pts;
        if( p_sys->i_pts > VLC_TS_INVALID )
        {
            /* interpolate the next PTS */
            if( p_dec->fmt_in.video.i_frame_rate > 0 &&
                p_dec->fmt_in.video.i_frame_rate_base > 0 )
            {
                p_sys->i_pts += INT64_C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    p_dec->fmt_in.video.i_frame_rate_base /
                    (2 * p_dec->fmt_in.video.i_frame_rate);
            }
            else if( p_context->time_base.den > 0 )
            {
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52,20,0)
                int i_tick = p_context->ticks_per_frame;
                if( i_tick <= 0 )
                    i_tick = 1;
#else
                int i_tick = 1;
#endif

                p_sys->i_pts += INT64_C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    i_tick * p_context->time_base.num /
                    (2 * p_context->time_base.den);
            }
        }

        /* Update frame late count (except when doing preroll) */
        mtime_t i_display_date = 0;
        if( !(p_block->i_flags & BLOCK_FLAG_PREROLL) )
            i_display_date = decoder_GetDisplayDate( p_dec, i_pts );

        if( i_display_date > 0 && i_display_date <= mdate() )
        {
            p_sys->i_late_frames++;
            if( p_sys->i_late_frames == 1 )
                p_sys->i_late_frames_start = mdate();
        }
        else
        {
            p_sys->i_late_frames = 0;
        }

        if( !b_drawpicture || ( !p_sys->p_va && !p_sys->p_ff_pic->linesize[0] ) )
            continue;

        if( !p_sys->p_ff_pic->opaque )
        {
            /* Get a new picture */
            p_pic = ffmpeg_NewPictBuf( p_dec, p_context );
            if( !p_pic )
            {
                block_Release( p_block );
                return NULL;
            }

            /* Fill p_picture_t from AVVideoFrame and do chroma conversion
             * if needed */
            ffmpeg_CopyPicture( p_dec, p_pic, p_sys->p_ff_pic );
        }
        else
        {
            p_pic = (picture_t *)p_sys->p_ff_pic->opaque;
            decoder_LinkPicture( p_dec, p_pic );
        }

        /* Sanity check (seems to be needed for some streams) */
        if( p_sys->p_ff_pic->pict_type == FF_B_TYPE )
        {
            p_sys->b_has_b_frames = true;
        }

        if( !p_dec->fmt_in.video.i_sar_num || !p_dec->fmt_in.video.i_sar_den )
        {
            /* Fetch again the aspect ratio in case it changed */
            p_dec->fmt_out.video.i_sar_num
                = p_context->sample_aspect_ratio.num;
            p_dec->fmt_out.video.i_sar_den
                = p_context->sample_aspect_ratio.den;

            if( !p_dec->fmt_out.video.i_sar_num || !p_dec->fmt_out.video.i_sar_den )
            {
                p_dec->fmt_out.video.i_sar_num = 1;
                p_dec->fmt_out.video.i_sar_den = 1;
            }
        }

        /* Send decoded frame to vout */
        if( i_pts > VLC_TS_INVALID)
        {
            p_pic->date = i_pts;

            if( p_sys->b_first_frame )
            {
                /* Hack to force display of still pictures */
                p_sys->b_first_frame = false;
                p_pic->b_force = true;
            }

            p_pic->i_nb_fields = 2 + p_sys->p_ff_pic->repeat_pict;
            p_pic->b_progressive = !p_sys->p_ff_pic->interlaced_frame;
            p_pic->b_top_field_first = p_sys->p_ff_pic->top_field_first;

            p_pic->i_qstride = p_sys->p_ff_pic->qstride;
            int i_mb_h = ( p_pic->format.i_height + 15 ) / 16;
            p_pic->p_q = malloc( p_pic->i_qstride * i_mb_h );
            memcpy( p_pic->p_q, p_sys->p_ff_pic->qscale_table,
                    p_pic->i_qstride * i_mb_h );
            switch( p_sys->p_ff_pic->qscale_type )
            {
                case FF_QSCALE_TYPE_MPEG1:
                    p_pic->i_qtype = QTYPE_MPEG1;
                    break;
                case FF_QSCALE_TYPE_MPEG2:
                    p_pic->i_qtype = QTYPE_MPEG2;
                    break;
                case FF_QSCALE_TYPE_H264:
                    p_pic->i_qtype = QTYPE_H264;
                    break;
            }

            return p_pic;
        }
        else
        {
            decoder_DeletePicture( p_dec, p_pic );
        }
    }

    block_Release( p_block );
    return NULL;
}
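Example #5 carries the PTS/DTS through libavcodec's frame reordering via reordered_opaque: the timestamp is shifted left by one bit and the low bit records whether it came from a DTS (1) or a PTS (0), with INT64_MIN marking "no timestamp". A small illustrative pair of helpers (tag_ts and untag_ts are not in the original) showing the round trip:

/* Illustrative helpers for the reordered_opaque tagging used in Example #5. */
static inline int64_t tag_ts( mtime_t i_pts, mtime_t i_dts )
{
    if( i_pts > VLC_TS_INVALID )
        return ( i_pts << 1 ) | 0;   /* PTS */
    if( i_dts > VLC_TS_INVALID )
        return ( i_dts << 1 ) | 1;   /* DTS */
    return INT64_MIN;                /* unknown */
}

static inline void untag_ts( int64_t i_tagged, mtime_t *pi_ts, bool *pb_dts )
{
    /* The caller checks for INT64_MIN first, as Example #5 does. */
    *pi_ts = i_tagged >> 1;          /* original timestamp */
    *pb_dts = ( i_tagged & 1 ) != 0; /* true if it came from a DTS */
}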
Example #6
/*****************************************************************************
 * DecodeVideo: Called to decode one or more frames
 *****************************************************************************/
picture_t *DecodeVideo( decoder_t *p_dec, block_t **pp_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    AVCodecContext *p_context = p_sys->p_context;
    int b_drawpicture;
    block_t *p_block;

    if( !pp_block )
        return NULL;

    if( !p_context->extradata_size && p_dec->fmt_in.i_extra )
    {
        ffmpeg_InitCodec( p_dec );
        if( p_sys->b_delayed_open )
        {
            if( ffmpeg_OpenCodec( p_dec ) )
                msg_Err( p_dec, "cannot open codec (%s)", p_sys->psz_namecodec );
        }
    }

    p_block = *pp_block;
    if(!p_block && !(p_sys->p_codec->capabilities & CODEC_CAP_DELAY) )
        return NULL;

    if( p_sys->b_delayed_open )
    {
        if( p_block )
            block_Release( p_block );
        return NULL;
    }

    if( p_block)
    {
        if( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED) )
        {
            p_sys->i_pts = VLC_TS_INVALID; /* To make sure we recover properly */

            p_sys->i_late_frames = 0;

            post_mt( p_sys );
            if( p_block->i_flags & BLOCK_FLAG_DISCONTINUITY )
                avcodec_flush_buffers( p_context );
            wait_mt( p_sys );

            block_Release( p_block );
            return NULL;
        }

        if( p_block->i_flags & BLOCK_FLAG_PREROLL )
        {
            /* Do not care about late frames when prerolling
             * TODO avoid decoding of non reference frame
             * (ie all B except for H264 where it depends only on nal_ref_idc) */
            p_sys->i_late_frames = 0;
        }
    }

    if( !p_dec->b_pace_control && (p_sys->i_late_frames > 0) &&
        (mdate() - p_sys->i_late_frames_start > INT64_C(5000000)) )
    {
        if( p_sys->i_pts > VLC_TS_INVALID )
        {
            p_sys->i_pts = VLC_TS_INVALID; /* To make sure we recover properly */
        }
        if( p_block )
            block_Release( p_block );
        p_sys->i_late_frames--;
        msg_Err( p_dec, "more than 5 seconds of late video -> "
                 "dropping frame (computer too slow ?)" );
        return NULL;
    }

    /* A good idea could be to decode all I pictures and then decide for the others */
    if( !p_dec->b_pace_control &&
        p_sys->b_hurry_up &&
        (p_sys->i_late_frames > 4) )
    {
        b_drawpicture = 0;
        if( p_sys->i_late_frames < 12 )
        {
            p_context->skip_frame =
                    (p_sys->i_skip_frame <= AVDISCARD_NONREF) ?
                    AVDISCARD_NONREF : p_sys->i_skip_frame;
        }
        else
        {
            /* picture too late, won't decode
             * but break picture until a new I, and for mpeg4 ...*/
            p_sys->i_late_frames--; /* needed, else it would never decrease */
            if( p_block )
                block_Release( p_block );
            msg_Warn( p_dec, "More than 4 late frames, dropping frame" );
            return NULL;
        }
    }
    else
    {
        if( p_sys->b_hurry_up )
            p_context->skip_frame = p_sys->i_skip_frame;
        if( !p_block || !(p_block->i_flags & BLOCK_FLAG_PREROLL) )
            b_drawpicture = 1;
        else
            b_drawpicture = 0;
    }

    if( p_context->width <= 0 || p_context->height <= 0 )
    {
        if( p_sys->b_hurry_up )
            p_context->skip_frame = p_sys->i_skip_frame;
    }
    else if( !b_drawpicture )
    {
        /* It creates broken picture
         * FIXME either our parser or ffmpeg is broken */
#if 0
        if( p_sys->b_hurry_up )
            p_context->skip_frame = __MAX( p_context->skip_frame,
                                                  AVDISCARD_NONREF );
#endif
    }

    /*
     * Do the actual decoding now */

    /* Don't forget that libavcodec requires a few more bytes
     * than the real frame size */
    if( p_block && p_block->i_buffer > 0 )
    {
        p_sys->b_flush = ( p_block->i_flags & BLOCK_FLAG_END_OF_SEQUENCE ) != 0;

        p_block = block_Realloc( p_block, 0,
                            p_block->i_buffer + FF_INPUT_BUFFER_PADDING_SIZE );
        if( !p_block )
            return NULL;
        p_block->i_buffer -= FF_INPUT_BUFFER_PADDING_SIZE;
        *pp_block = p_block;
        memset( p_block->p_buffer + p_block->i_buffer, 0,
                FF_INPUT_BUFFER_PADDING_SIZE );
    }

    while( !p_block || p_block->i_buffer > 0 || p_sys->b_flush )
    {
        int i_used, b_gotpicture;
        picture_t *p_pic;
        AVPacket pkt;

        post_mt( p_sys );

        av_init_packet( &pkt );
        if( p_block )
        {
            pkt.data = p_block->p_buffer;
            pkt.size = p_block->i_buffer;
            pkt.pts = p_block->i_pts;
            pkt.dts = p_block->i_dts;
        }
        else
        {
            /* Return delayed frames if codec has CODEC_CAP_DELAY */
            pkt.data = NULL;
            pkt.size = 0;
        }

#if LIBAVCODEC_VERSION_MAJOR >= 54
        if( !p_sys->palette_sent )
        {
            uint8_t *pal = av_packet_new_side_data(&pkt, AV_PKT_DATA_PALETTE, AVPALETTE_SIZE);
            if (pal) {
                memcpy(pal, p_dec->fmt_in.video.p_palette->palette, AVPALETTE_SIZE);
                p_sys->palette_sent = true;
            }
        }
#endif

        /* Make sure we don't reuse the same timestamps twice */
        if( p_block )
        {
            p_block->i_pts =
            p_block->i_dts = VLC_TS_INVALID;
        }

        i_used = avcodec_decode_video2( p_context, p_sys->p_ff_pic,
                                       &b_gotpicture, &pkt );

        wait_mt( p_sys );

        if( p_sys->b_flush )
            p_sys->b_first_frame = true;

        if( p_block )
        {
            if( p_block->i_buffer <= 0 )
                p_sys->b_flush = false;

            if( i_used < 0 )
            {
                if( b_drawpicture )
                    msg_Warn( p_dec, "cannot decode one frame (%zu bytes)",
                            p_block->i_buffer );
                block_Release( p_block );
                return NULL;
            }
            else if( (unsigned)i_used > p_block->i_buffer ||
                    p_context->thread_count > 1 )
            {
                i_used = p_block->i_buffer;
            }

            /* Consumed bytes */
            p_block->i_buffer -= i_used;
            p_block->p_buffer += i_used;
        }

        /* Nothing to display */
        if( !b_gotpicture )
        {
            if( i_used == 0 ) break;
            continue;
        }

        /* Sanity check (seems to be needed for some streams) */
        if( p_sys->p_ff_pic->pict_type == AV_PICTURE_TYPE_B)
        {
            p_sys->b_has_b_frames = true;
        }

        /* Compute the PTS */
        mtime_t i_pts =
                    p_sys->p_ff_pic->pkt_pts;
        if (i_pts <= VLC_TS_INVALID)
            i_pts = p_sys->p_ff_pic->pkt_dts;

        if( i_pts <= VLC_TS_INVALID )
            i_pts = p_sys->i_pts;

        /* Interpolate the next PTS */
        if( i_pts > VLC_TS_INVALID )
            p_sys->i_pts = i_pts;
        if( p_sys->i_pts > VLC_TS_INVALID )
        {
            /* interpolate the next PTS */
            if( p_dec->fmt_in.video.i_frame_rate > 0 &&
                p_dec->fmt_in.video.i_frame_rate_base > 0 )
            {
                p_sys->i_pts += INT64_C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    p_dec->fmt_in.video.i_frame_rate_base /
                    (2 * p_dec->fmt_in.video.i_frame_rate);
            }
            else if( p_context->time_base.den > 0 )
            {
                int i_tick = p_context->ticks_per_frame;
                if( i_tick <= 0 )
                    i_tick = 1;

                p_sys->i_pts += INT64_C(1000000) *
                    (2 + p_sys->p_ff_pic->repeat_pict) *
                    i_tick * p_context->time_base.num /
                    (2 * p_context->time_base.den);
            }
        }

        /* Update frame late count (except when doing preroll) */
        mtime_t i_display_date = 0;
        if( !p_block || !(p_block->i_flags & BLOCK_FLAG_PREROLL) )
            i_display_date = decoder_GetDisplayDate( p_dec, i_pts );

        if( i_display_date > 0 && i_display_date <= mdate() )
        {
            p_sys->i_late_frames++;
            if( p_sys->i_late_frames == 1 )
                p_sys->i_late_frames_start = mdate();
        }
        else
        {
            p_sys->i_late_frames = 0;
        }

        if( !b_drawpicture || ( !p_sys->p_va && !p_sys->p_ff_pic->linesize[0] ) )
            continue;

        if( p_sys->p_va != NULL || p_sys->p_ff_pic->opaque == NULL )
        {
            /* Get a new picture */
            p_pic = ffmpeg_NewPictBuf( p_dec, p_context );
            if( !p_pic )
            {
                if( p_block )
                    block_Release( p_block );
                return NULL;
            }

            /* Fill p_picture_t from AVVideoFrame and do chroma conversion
             * if needed */
            ffmpeg_CopyPicture( p_dec, p_pic, p_sys->p_ff_pic );
        }
        else
        {
            p_pic = (picture_t *)p_sys->p_ff_pic->opaque;
            decoder_LinkPicture( p_dec, p_pic );
        }

        if( !p_dec->fmt_in.video.i_sar_num || !p_dec->fmt_in.video.i_sar_den )
        {
            /* Fetch again the aspect ratio in case it changed */
            p_dec->fmt_out.video.i_sar_num
                = p_context->sample_aspect_ratio.num;
            p_dec->fmt_out.video.i_sar_den
                = p_context->sample_aspect_ratio.den;

            if( !p_dec->fmt_out.video.i_sar_num || !p_dec->fmt_out.video.i_sar_den )
            {
                p_dec->fmt_out.video.i_sar_num = 1;
                p_dec->fmt_out.video.i_sar_den = 1;
            }
        }

        /* Send decoded frame to vout */
        if( i_pts > VLC_TS_INVALID)
        {
            p_pic->date = i_pts;

            if( p_sys->b_first_frame )
            {
                /* Hack to force display of still pictures */
                p_sys->b_first_frame = false;
                p_pic->b_force = true;
            }

            p_pic->i_nb_fields = 2 + p_sys->p_ff_pic->repeat_pict;
            p_pic->b_progressive = !p_sys->p_ff_pic->interlaced_frame;
            p_pic->b_top_field_first = p_sys->p_ff_pic->top_field_first;

            return p_pic;
        }
        else
        {
            decoder_DeletePicture( p_dec, p_pic );
        }
    }

    if( p_block )
        block_Release( p_block );
    return NULL;
}
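Both decode loops interpolate the next PTS as one frame duration counted in fields (2 + repeat_pict). An illustrative helper (interpolate_pts_step is not in the original) with a worked number: at 25 fps progressive material (i_frame_rate = 25, i_frame_rate_base = 1, repeat_pict = 0) the step is 1000000 * 2 * 1 / (2 * 25) = 40000 microseconds per frame.

/* Illustrative restatement of the PTS increment used in Examples #5 and #6. */
static mtime_t interpolate_pts_step( unsigned i_frame_rate,
                                     unsigned i_frame_rate_base,
                                     int i_repeat_pict )
{
    return INT64_C(1000000) * (2 + i_repeat_pict) * i_frame_rate_base /
           (2 * i_frame_rate);
}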