Example #1
/*****************************************************************************
 * ffmpeg_GetFrameBuf: callback used by ffmpeg to get a frame buffer.
 *****************************************************************************
 * It is used for direct rendering as well as to get the right PTS for each
 * decoded picture (even in indirect rendering mode).
 *****************************************************************************/
static int ffmpeg_GetFrameBuf( struct AVCodecContext *p_context,
                               AVFrame *p_ff_pic )
{
    decoder_t *p_dec = (decoder_t *)p_context->opaque;
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic;

    /* Set picture PTS */
    if( p_sys->input_pts )
    {
        p_ff_pic->pts = p_sys->input_pts;
    }
    else if( p_sys->input_dts )
    {
        /* Some demuxers only set the DTS, so try to derive a usable
         * timestamp from it */
        if( !p_context->has_b_frames || !p_sys->b_has_b_frames ||
            !p_ff_pic->reference || !p_sys->i_pts )
        {
            p_ff_pic->pts = p_sys->input_dts;
        }
        else p_ff_pic->pts = 0;
    }
    else p_ff_pic->pts = 0;

    if( p_sys->i_pts ) /* make sure 1st frame has a pts > 0 */
    {
        p_sys->input_pts = p_sys->input_dts = 0;
    }

    p_ff_pic->opaque = 0;

    /* Not much to do in indirect rendering mode */
    if( !p_sys->b_direct_rendering || p_sys->b_pp )
    {
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }

    /* Some codecs set pix_fmt only after the 1st frame has been decoded,
     * so this check is necessary. */
    if( !ffmpeg_PixFmtToChroma( p_context->pix_fmt ) ||
        p_sys->p_context->width % 16 || p_sys->p_context->height % 16 )
    {
        msg_Dbg( p_dec, "disabling direct rendering" );
        p_sys->b_direct_rendering = 0;
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }

    /* Get a new picture */
    //p_sys->p_vout->render.b_allow_modify_pics = 0;
    p_pic = ffmpeg_NewPictBuf( p_dec, p_sys->p_context );
    if( !p_pic )
    {
        p_sys->b_direct_rendering = 0;
        return avcodec_default_get_buffer( p_context, p_ff_pic );
    }
    p_sys->p_context->draw_horiz_band = NULL;

    p_ff_pic->opaque = (void*)p_pic;
    p_ff_pic->type = FF_BUFFER_TYPE_USER;
    p_ff_pic->data[0] = p_pic->p[0].p_pixels;
    p_ff_pic->data[1] = p_pic->p[1].p_pixels;
    p_ff_pic->data[2] = p_pic->p[2].p_pixels;
    p_ff_pic->data[3] = NULL; /* fourth (alpha) plane, unused here */

    p_ff_pic->linesize[0] = p_pic->p[0].i_pitch;
    p_ff_pic->linesize[1] = p_pic->p[1].i_pitch;
    p_ff_pic->linesize[2] = p_pic->p[2].i_pitch;
    p_ff_pic->linesize[3] = 0;

    if( p_ff_pic->reference != 0 )
    {
        p_dec->pf_picture_link( p_dec, p_pic );
    }

    /* FIXME: the proper value is unknown; use the very large default that
     * ffmpeg itself uses */
    p_ff_pic->age = 256*256*256*64;

    return 0;
}
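
Example #4 below installs ffmpeg_ReleaseFrameBuf as the matching release_buffer callback, but its body is not part of these listings. Purely as a hedged sketch of what that counterpart has to undo (pf_picture_unlink is assumed to be the inverse of the pf_picture_link call above), it could look like this:

/* Hypothetical sketch, not the actual VLC implementation: undo what
 * ffmpeg_GetFrameBuf() did for direct rendering, and fall back to the
 * default libavcodec path otherwise. */
static void ffmpeg_ReleaseFrameBuf( struct AVCodecContext *p_context,
                                    AVFrame *p_ff_pic )
{
    decoder_t *p_dec = (decoder_t *)p_context->opaque;
    picture_t *p_pic;

    if( p_ff_pic->type != FF_BUFFER_TYPE_USER )
    {
        /* The buffer was allocated by avcodec_default_get_buffer() */
        avcodec_default_release_buffer( p_context, p_ff_pic );
        return;
    }

    p_pic = (picture_t *)p_ff_pic->opaque;
    if( p_pic && p_ff_pic->reference != 0 )
    {
        /* Release the extra reference taken with pf_picture_link()
         * (pf_picture_unlink is assumed here) */
        p_dec->pf_picture_unlink( p_dec, p_pic );
    }

    p_ff_pic->data[0] = NULL;
    p_ff_pic->data[1] = NULL;
    p_ff_pic->data[2] = NULL;
    p_ff_pic->data[3] = NULL;
}
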
Example #2
/*****************************************************************************
 * ffmpeg_CopyPicture: copy a picture from ffmpeg internal buffers to a
 *                     picture_t structure (when not in direct rendering mode).
 *****************************************************************************/
static void ffmpeg_CopyPicture( decoder_t *p_dec,
                                picture_t *p_pic, AVFrame *p_ff_pic )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if( ffmpeg_PixFmtToChroma( p_sys->p_context->pix_fmt ) )
    {
        int i_plane, i_size, i_line;
        uint8_t *p_dst, *p_src;
        int i_src_stride, i_dst_stride;

#ifdef LIBAVCODEC_PP
        if( p_sys->p_pp && p_sys->b_pp )
            E_(PostprocPict)( p_dec, p_sys->p_pp, p_pic, p_ff_pic );
        else
#endif
        for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
        {
            p_src  = p_ff_pic->data[i_plane];
            p_dst = p_pic->p[i_plane].p_pixels;
            i_src_stride = p_ff_pic->linesize[i_plane];
            i_dst_stride = p_pic->p[i_plane].i_pitch;

            i_size = __MIN( i_src_stride, i_dst_stride );
            for( i_line = 0; i_line < p_pic->p[i_plane].i_visible_lines;
                 i_line++ )
            {
                p_dec->p_vlc->pf_memcpy( p_dst, p_src, i_size );
                p_src += i_src_stride;
                p_dst += i_dst_stride;
            }
        }
    }
    else
    {
        AVPicture dest_pic;
        int i;

        /* we need to convert to I420 */
        switch( p_sys->p_context->pix_fmt )
        {
        case PIX_FMT_YUV410P:
        case PIX_FMT_YUV411P:
        case PIX_FMT_PAL8:
            for( i = 0; i < p_pic->i_planes; i++ )
            {
                dest_pic.data[i] = p_pic->p[i].p_pixels;
                dest_pic.linesize[i] = p_pic->p[i].i_pitch;
            }
            img_convert( &dest_pic, PIX_FMT_YUV420P,
                         (AVPicture *)p_ff_pic,
                         p_sys->p_context->pix_fmt,
                         p_sys->p_context->width,
                         p_sys->p_context->height );
            break;
        default:
            msg_Err( p_dec, "don't know how to convert chroma %i",
                     p_sys->p_context->pix_fmt );
            p_dec->b_error = 1;
            break;
        }
    }
}
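
ffmpeg_PixFmtToChroma() is used throughout these listings but not shown. As a rough, assumed sketch of the mapping it performs (the exact table in the plugin may differ), it translates libavcodec pixel formats into VLC chroma fourccs and returns 0 when there is no direct equivalent:

/* Hypothetical sketch of the pixel-format-to-chroma mapping; returning 0
 * is what the callers above test for before falling back to I420. */
static uint32_t ffmpeg_PixFmtToChroma( int i_ffmpeg_pix_fmt )
{
    switch( i_ffmpeg_pix_fmt )
    {
    case PIX_FMT_YUV420P:
        return VLC_FOURCC('I','4','2','0');
    case PIX_FMT_YUV422P:
        return VLC_FOURCC('I','4','2','2');
    case PIX_FMT_YUV444P:
        return VLC_FOURCC('I','4','4','4');
    case PIX_FMT_GRAY8:
        return VLC_FOURCC('G','R','E','Y');
    default:
        return 0; /* no direct mapping; conversion needed */
    }
}
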
Example #3
/* Returns a new picture buffer */
static inline picture_t *ffmpeg_NewPictBuf( decoder_t *p_dec,
                                            AVCodecContext *p_context )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic;

    p_dec->fmt_out.video.i_width = p_context->width;
    p_dec->fmt_out.video.i_height = p_context->height;
    p_dec->fmt_out.i_codec = ffmpeg_PixFmtToChroma( p_context->pix_fmt );

    if( !p_context->width || !p_context->height )
    {
        return NULL; /* invalid display size */
    }

    if( !p_dec->fmt_out.i_codec )
    {
        /* fall back to I420; ffmpeg_CopyPicture() converts to it when the
         * chroma has no direct mapping */
        p_dec->fmt_out.i_codec = VLC_FOURCC('I','4','2','0');
    }

    /* If an aspect-ratio was specified in the input format then force it */
    if( p_dec->fmt_in.video.i_aspect )
    {
        p_dec->fmt_out.video.i_aspect = p_dec->fmt_in.video.i_aspect;
    }
    else
    {
#if LIBAVCODEC_BUILD >= 4687
        p_dec->fmt_out.video.i_aspect =
            VOUT_ASPECT_FACTOR * ( av_q2d(p_context->sample_aspect_ratio) *
                p_context->width / p_context->height );
#else
        p_dec->fmt_out.video.i_aspect =
            VOUT_ASPECT_FACTOR * p_context->aspect_ratio;
#endif
        if( p_dec->fmt_out.video.i_aspect == 0 )
        {
            p_dec->fmt_out.video.i_aspect =
                VOUT_ASPECT_FACTOR * p_context->width / p_context->height;
        }
    }

    if( p_context->frame_rate > 0 && p_context->frame_rate_base > 0 )
    {
        p_dec->fmt_out.video.i_frame_rate = p_context->frame_rate;
        p_dec->fmt_out.video.i_frame_rate_base = p_context->frame_rate_base;
    }

    p_pic = p_dec->pf_vout_buffer_new( p_dec );

#ifdef LIBAVCODEC_PP
    if( p_sys->p_pp && p_sys->b_pp && !p_sys->b_pp_init )
    {
        E_(InitPostproc)( p_dec, p_sys->p_pp, p_context->width,
                          p_context->height, p_context->pix_fmt );
        p_sys->b_pp_init = VLC_TRUE;
    }
#endif

    return p_pic;
}
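
ffmpeg_GetFrameBuf() and ffmpeg_NewPictBuf() both depend on p_sys->input_pts and p_sys->input_dts being refreshed by the decode loop before each call into libavcodec. That loop is not reproduced here; a simplified, assumed sketch of the relevant step (the real DecodeVideo() also buffers input data, drops late frames and interpolates the pts) might be:

/* Hypothetical helper, for illustration only: feed one block to
 * libavcodec so that ffmpeg_GetFrameBuf() can pick up its timestamps. */
static void ffmpeg_DecodeBlock( decoder_t *p_dec, block_t *p_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    int b_got_picture = 0;

    /* Timestamps of the block being decoded; the get_buffer callback
     * copies them onto the AVFrame of the picture being started. */
    p_sys->input_pts = p_block->i_pts;
    p_sys->input_dts = p_block->i_dts;

    avcodec_decode_video( p_sys->p_context, p_sys->p_ff_pic, &b_got_picture,
                          p_block->p_buffer, p_block->i_buffer );

    if( b_got_picture && p_sys->p_ff_pic->pts )
    {
        /* Frames come back in display order, so the pts set in
         * ffmpeg_GetFrameBuf() is already correctly reordered. */
        p_sys->i_pts = p_sys->p_ff_pic->pts;
    }
}
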
Example #4
/*****************************************************************************
 * InitVideoDec: initialize the video decoder
 *****************************************************************************
 * The ffmpeg codec is opened and some memory is allocated. The vout is not
 * opened yet (that is done after the first frame has been decoded).
 *****************************************************************************/
int E_(InitVideoDec)( decoder_t *p_dec, AVCodecContext *p_context,
                      AVCodec *p_codec, int i_codec_id, char *psz_namecodec )
{
    decoder_sys_t *p_sys;
    vlc_value_t lockval;
    vlc_value_t val;

    var_Get( p_dec->p_libvlc, "avcodec", &lockval );

    /* Allocate the memory needed to store the decoder's structure */
    if( ( p_dec->p_sys = p_sys =
          (decoder_sys_t *)malloc(sizeof(decoder_sys_t)) ) == NULL )
    {
        msg_Err( p_dec, "out of memory" );
        return VLC_EGENERIC;
    }

    p_dec->p_sys->p_context = p_context;
    p_dec->p_sys->p_codec = p_codec;
    p_dec->p_sys->i_codec_id = i_codec_id;
    p_dec->p_sys->psz_namecodec = psz_namecodec;
    p_sys->p_ff_pic = avcodec_alloc_frame();

    /* ***** Fill p_context with init values ***** */
    /* FIXME: remove when ffmpeg deals properly with avc1 */
    if( p_dec->fmt_in.i_codec != VLC_FOURCC('a','v','c','1') )
    /* End FIXME */
    p_sys->p_context->codec_tag = ffmpeg_CodecTag( p_dec->fmt_in.i_codec );
    p_sys->p_context->width  = p_dec->fmt_in.video.i_width;
    p_sys->p_context->height = p_dec->fmt_in.video.i_height;
    p_sys->p_context->bits_per_sample = p_dec->fmt_in.video.i_bits_per_pixel;

    /*  ***** Get configuration of ffmpeg plugin ***** */
    p_sys->p_context->workaround_bugs =
        config_GetInt( p_dec, "ffmpeg-workaround-bugs" );
    p_sys->p_context->error_resilience =
        config_GetInt( p_dec, "ffmpeg-error-resilience" );

    var_Create( p_dec, "grayscale", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "grayscale", &val );
    if( val.b_bool ) p_sys->p_context->flags |= CODEC_FLAG_GRAY;

    var_Create( p_dec, "ffmpeg-vismv", VLC_VAR_INTEGER | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-vismv", &val );
#if LIBAVCODEC_BUILD >= 4698
    if( val.i_int ) p_sys->p_context->debug_mv = val.i_int;
#endif

    var_Create( p_dec, "ffmpeg-lowres", VLC_VAR_INTEGER | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-lowres", &val );
#if LIBAVCODEC_BUILD >= 4723
    if( val.i_int > 0 && val.i_int <= 2 ) p_sys->p_context->lowres = val.i_int;
#endif

    /* ***** ffmpeg frame skipping ***** */
    var_Create( p_dec, "ffmpeg-hurry-up", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-hurry-up", &val );
    p_sys->b_hurry_up = val.b_bool;

    /* ***** ffmpeg direct rendering ***** */
    p_sys->b_direct_rendering = 0;
    var_Create( p_dec, "ffmpeg-dr", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-dr", &val );
    if( val.b_bool && (p_sys->p_codec->capabilities & CODEC_CAP_DR1) &&
        ffmpeg_PixFmtToChroma( p_sys->p_context->pix_fmt ) &&
        /* Apparently direct rendering doesn't work with YUV422P */
        p_sys->p_context->pix_fmt != PIX_FMT_YUV422P &&
        /* H264 uses too many reference frames */
        p_sys->i_codec_id != CODEC_ID_H264 &&
        !(p_sys->p_context->width % 16) && !(p_sys->p_context->height % 16) &&
#if LIBAVCODEC_BUILD >= 4698
        !p_sys->p_context->debug_mv )
#else
        1 )
#endif
    {
        /* Some codecs set pix_fmt only after the 1st frame has been decoded,
         * so we need to do another check in ffmpeg_GetFrameBuf() */
        p_sys->b_direct_rendering = 1;
    }

#ifdef LIBAVCODEC_PP
    p_sys->p_pp = NULL;
    p_sys->b_pp = p_sys->b_pp_async = p_sys->b_pp_init = VLC_FALSE;
    p_sys->p_pp = E_(OpenPostproc)( p_dec, &p_sys->b_pp_async );
#endif

    /* ffmpeg doesn't properly release old pictures when frames are skipped */
    //if( p_sys->b_hurry_up ) p_sys->b_direct_rendering = 0;
    if( p_sys->b_direct_rendering )
    {
        msg_Dbg( p_dec, "using direct rendering" );
        p_sys->p_context->flags |= CODEC_FLAG_EMU_EDGE;
    }

    /* Always use our get_buffer wrapper so we can calculate the
     * PTS correctly */
    p_sys->p_context->get_buffer = ffmpeg_GetFrameBuf;
    p_sys->p_context->release_buffer = ffmpeg_ReleaseFrameBuf;
    p_sys->p_context->opaque = p_dec;

    /* ***** Initialize this codec with its extra data ***** */
    if( p_dec->fmt_in.i_extra )
    {
        int i_size = p_dec->fmt_in.i_extra;

        if( p_sys->i_codec_id == CODEC_ID_SVQ3 )
        {
            uint8_t *p;

            p_sys->p_context->extradata_size = i_size + 12;
            p = p_sys->p_context->extradata  =
                malloc( p_sys->p_context->extradata_size );

            memcpy( &p[0],  "SVQ3", 4 );
            memset( &p[4], 0, 8 );
            memcpy( &p[12], p_dec->fmt_in.p_extra, i_size );

            /* Now remove all atoms before the SMI one */
            if( p_sys->p_context->extradata_size > 0x5a &&
                strncmp( &p[0x56], "SMI ", 4 ) )
            {
                uint8_t *psz = &p[0x52];

                while( psz < &p[p_sys->p_context->extradata_size - 8] )
                {
                    int i_size = GetDWBE( psz );
                    if( i_size <= 1 )
                    {
                        /* FIXME: size 1 means a 64-bit extended size and 0
                         * means "to end of file"; neither is handled */
                        break;
                    }
                    if( !strncmp( &psz[4], "SMI ", 4 ) )
                    {
                        memmove( &p[0x52], psz,
                                 &p[p_sys->p_context->extradata_size] - psz );
                        break;
                    }

                    psz += i_size;
                }
            }
        }
        else if( p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '1', '0' ) ||
                 p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '1', '3' ) ||
                 p_dec->fmt_in.i_codec == VLC_FOURCC( 'R', 'V', '2', '0' ) )
        {
            if( p_dec->fmt_in.i_extra == 8 )
            {
                p_sys->p_context->extradata_size = 8;
                p_sys->p_context->extradata = malloc( 8 );

                memcpy( p_sys->p_context->extradata,
                        p_dec->fmt_in.p_extra,
                        p_dec->fmt_in.i_extra );
                p_sys->p_context->sub_id =
                    ((uint32_t*)p_dec->fmt_in.p_extra)[1];

                msg_Warn( p_dec, "using extra data for RV codec sub_id=%08x",
                          p_sys->p_context->sub_id );
            }
        }
        /* FIXME: remove when ffmpeg deals properly with avc1 */
        else if( p_dec->fmt_in.i_codec == VLC_FOURCC('a','v','c','1') )
        {
            ;
        }
        /* End FIXME */
        else
        {
            p_sys->p_context->extradata_size = i_size;
            p_sys->p_context->extradata =
                malloc( i_size + FF_INPUT_BUFFER_PADDING_SIZE );
            memcpy( p_sys->p_context->extradata,
                    p_dec->fmt_in.p_extra, i_size );
            memset( &((uint8_t*)p_sys->p_context->extradata)[i_size],
                    0, FF_INPUT_BUFFER_PADDING_SIZE );
        }
    }

    /* ***** misc init ***** */
    p_sys->input_pts = p_sys->input_dts = 0;
    p_sys->i_pts = 0;
    p_sys->b_has_b_frames = VLC_FALSE;
    p_sys->b_first_frame = VLC_TRUE;
    p_sys->i_late_frames = 0;
    p_sys->i_buffer = 0;
    p_sys->i_buffer_orig = 1;
    p_sys->p_buffer_orig = p_sys->p_buffer = malloc( p_sys->i_buffer_orig );

    /* Set output properties */
    p_dec->fmt_out.i_cat = VIDEO_ES;
    p_dec->fmt_out.i_codec = ffmpeg_PixFmtToChroma( p_context->pix_fmt );

    /* Setup palette */
#if LIBAVCODEC_BUILD >= 4688
    if( p_dec->fmt_in.video.p_palette )
        p_sys->p_context->palctrl =
            (AVPaletteControl *)p_dec->fmt_in.video.p_palette;
    else
        p_sys->p_context->palctrl = &palette_control;
#endif

    /* ***** Open the codec ***** */
    vlc_mutex_lock( lockval.p_address );
    if( avcodec_open( p_sys->p_context, p_sys->p_codec ) < 0 )
    {
        vlc_mutex_unlock( lockval.p_address );
        msg_Err( p_dec, "cannot open codec (%s)", p_sys->psz_namecodec );
        free( p_sys );
        return VLC_EGENERIC;
    }
    vlc_mutex_unlock( lockval.p_address );
    msg_Dbg( p_dec, "ffmpeg codec (%s) started", p_sys->psz_namecodec );


    return VLC_SUCCESS;
}
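
InitVideoDec() allocates p_ff_pic, the extradata and the input buffer, and opens the codec. The cleanup counterpart is not shown in these listings; a hedged sketch of what it has to release (the real plugin may split this work with its generic close path, and the helper name is assumed) is:

/* Hypothetical cleanup sketch, undoing the allocations made in
 * InitVideoDec(); the LIBAVCODEC_PP postprocessor teardown is omitted. */
void E_(EndVideoDec)( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    /* Allocated with avcodec_alloc_frame() */
    if( p_sys->p_ff_pic ) av_free( p_sys->p_ff_pic );

    if( p_sys->p_context->extradata )
        free( p_sys->p_context->extradata );

    free( p_sys->p_buffer_orig );

    /* avcodec_close() and free( p_sys ) are assumed to happen in the
     * module's generic close path, under the same "avcodec" lock that
     * protected avcodec_open(). */
}
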
Example #5
/*****************************************************************************
 * InitVideoDec: initialize the video decoder
 *****************************************************************************
 * The ffmpeg codec is opened and some memory is allocated. The vout is not
 * opened yet (that is done after the first frame has been decoded).
 *****************************************************************************/
int E_(InitVideoDec)( decoder_t *p_dec, AVCodecContext *p_context,
                      AVCodec *p_codec, int i_codec_id, char *psz_namecodec )
{
    decoder_sys_t *p_sys;
    vlc_value_t lockval;
    vlc_value_t val;

    var_Get( p_dec->p_libvlc, "avcodec", &lockval );

    /* Allocate the memory needed to store the decoder's structure */
    if( ( p_dec->p_sys = p_sys =
          (decoder_sys_t *)malloc(sizeof(decoder_sys_t)) ) == NULL )
    {
        msg_Err( p_dec, "out of memory" );
        return VLC_EGENERIC;
    }

    p_dec->p_sys->p_context = p_context;
    p_dec->p_sys->p_codec = p_codec;
    p_dec->p_sys->i_codec_id = i_codec_id;
    p_dec->p_sys->psz_namecodec = psz_namecodec;
    p_sys->p_ff_pic = avcodec_alloc_frame();

    /* ***** Fill p_context with init values ***** */
    p_sys->p_context->codec_tag = ffmpeg_CodecTag( p_dec->fmt_in.i_codec );
    p_sys->p_context->width  = p_dec->fmt_in.video.i_width;
    p_sys->p_context->height = p_dec->fmt_in.video.i_height;
    p_sys->p_context->bits_per_sample = p_dec->fmt_in.video.i_bits_per_pixel;

    /*  ***** Get configuration of ffmpeg plugin ***** */
    p_sys->p_context->workaround_bugs =
        config_GetInt( p_dec, "ffmpeg-workaround-bugs" );
    p_sys->p_context->error_resilience =
        config_GetInt( p_dec, "ffmpeg-error-resilience" );

    var_Create( p_dec, "grayscale", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "grayscale", &val );
    if( val.b_bool ) p_sys->p_context->flags |= CODEC_FLAG_GRAY;

    var_Create( p_dec, "ffmpeg-vismv", VLC_VAR_INTEGER | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-vismv", &val );
#if LIBAVCODEC_BUILD >= 4698
    if( val.i_int ) p_sys->p_context->debug_mv = val.i_int;
#endif

    var_Create( p_dec, "ffmpeg-lowres", VLC_VAR_INTEGER | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-lowres", &val );
#if LIBAVCODEC_BUILD >= 4723
    if( val.i_int > 0 && val.i_int <= 2 ) p_sys->p_context->lowres = val.i_int;
#endif

    var_Create( p_dec, "ffmpeg-skiploopfilter",
                VLC_VAR_INTEGER | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-skiploopfilter", &val );
#if LIBAVCODEC_BUILD >= 4758
    if( val.i_int > 0 ) p_sys->p_context->skip_loop_filter = AVDISCARD_NONREF;
    if( val.i_int > 1 ) p_sys->p_context->skip_loop_filter = AVDISCARD_BIDIR;
    if( val.i_int > 2 ) p_sys->p_context->skip_loop_filter = AVDISCARD_NONKEY;
    if( val.i_int > 3 ) p_sys->p_context->skip_loop_filter = AVDISCARD_ALL;
#endif

    /* ***** ffmpeg frame skipping ***** */
    var_Create( p_dec, "ffmpeg-hurry-up", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-hurry-up", &val );
    p_sys->b_hurry_up = val.b_bool;

    /* ***** ffmpeg direct rendering ***** */
    p_sys->b_direct_rendering = 0;
    var_Create( p_dec, "ffmpeg-dr", VLC_VAR_BOOL | VLC_VAR_DOINHERIT );
    var_Get( p_dec, "ffmpeg-dr", &val );
    if( val.b_bool && (p_sys->p_codec->capabilities & CODEC_CAP_DR1) &&
        /* Apparently direct rendering doesn't work with YUV422P */
        p_sys->p_context->pix_fmt != PIX_FMT_YUV422P &&
        /* H264 uses too many reference frames */
        p_sys->i_codec_id != CODEC_ID_H264 &&
#if LIBAVCODEC_BUILD >= 4698
        !p_sys->p_context->debug_mv )
#else
        1 )
#endif
    {
        /* Some codecs set pix_fmt only after the 1st frame has been decoded,
         * so we need to do another check in ffmpeg_GetFrameBuf() */
        p_sys->b_direct_rendering = 1;
    }

#ifdef LIBAVCODEC_PP
    p_sys->p_pp = NULL;
    p_sys->b_pp = p_sys->b_pp_async = p_sys->b_pp_init = VLC_FALSE;
    p_sys->p_pp = E_(OpenPostproc)( p_dec, &p_sys->b_pp_async );
#endif

    /* ffmpeg doesn't properly release old pictures when frames are skipped */
    //if( p_sys->b_hurry_up ) p_sys->b_direct_rendering = 0;
    if( p_sys->b_direct_rendering )
    {
        msg_Dbg( p_dec, "using direct rendering" );
        p_sys->p_context->flags |= CODEC_FLAG_EMU_EDGE;
    }

    /* Always use our get_buffer wrapper so we can calculate the
     * PTS correctly */
    p_sys->p_context->get_buffer = ffmpeg_GetFrameBuf;
    p_sys->p_context->release_buffer = ffmpeg_ReleaseFrameBuf;
    p_sys->p_context->opaque = p_dec;

    /* ***** Initialize this codec with its extra data ***** */
    ffmpeg_InitCodec( p_dec );

    /* ***** misc init ***** */
    p_sys->input_pts = p_sys->input_dts = 0;
    p_sys->i_pts = 0;
    p_sys->b_has_b_frames = VLC_FALSE;
    p_sys->b_first_frame = VLC_TRUE;
    p_sys->i_late_frames = 0;
    p_sys->i_buffer = 0;
    p_sys->i_buffer_orig = 1;
    p_sys->p_buffer_orig = p_sys->p_buffer = malloc( p_sys->i_buffer_orig );

    /* Set output properties */
    p_dec->fmt_out.i_cat = VIDEO_ES;
    p_dec->fmt_out.i_codec = ffmpeg_PixFmtToChroma( p_context->pix_fmt );

    /* Setup palette */
#if LIBAVCODEC_BUILD >= 4688
    if( p_dec->fmt_in.video.p_palette )
        p_sys->p_context->palctrl =
            (AVPaletteControl *)p_dec->fmt_in.video.p_palette;
    else
        p_sys->p_context->palctrl = &palette_control;
#endif

    /* ***** Open the codec ***** */
    vlc_mutex_lock( lockval.p_address );
    if( avcodec_open( p_sys->p_context, p_sys->p_codec ) < 0 )
    {
        vlc_mutex_unlock( lockval.p_address );
        msg_Err( p_dec, "cannot open codec (%s)", p_sys->psz_namecodec );
        free( p_sys );
        return VLC_EGENERIC;
    }
    vlc_mutex_unlock( lockval.p_address );
    msg_Dbg( p_dec, "ffmpeg codec (%s) started", p_sys->psz_namecodec );


    return VLC_SUCCESS;
}
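
Example #5 replaces the inline extradata handling of Example #4 with a call to ffmpeg_InitCodec(), which is not shown. Based on the inline code in Example #4, a sketch of its generic branch (the real helper also contains the SVQ3, RV1x and avc1 special cases) would be:

/* Sketch of the generic branch of ffmpeg_InitCodec(), derived from the
 * inline extradata code shown in Example #4. */
static void ffmpeg_InitCodec( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    int i_size = p_dec->fmt_in.i_extra;

    if( !i_size ) return;

    /* libavcodec requires zeroed, readable padding after the extradata */
    p_sys->p_context->extradata =
        malloc( i_size + FF_INPUT_BUFFER_PADDING_SIZE );
    if( !p_sys->p_context->extradata ) return;

    p_sys->p_context->extradata_size = i_size;
    memcpy( p_sys->p_context->extradata, p_dec->fmt_in.p_extra, i_size );
    memset( &((uint8_t*)p_sys->p_context->extradata)[i_size],
            0, FF_INPUT_BUFFER_PADDING_SIZE );
}
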