Code example #1
File: subpicture.c  Project: CityFire/vlc
void subpicture_Update( subpicture_t *p_subpicture,
                        const video_format_t *p_fmt_src,
                        const video_format_t *p_fmt_dst,
                        mtime_t i_ts )
{
    subpicture_updater_t *p_upd = &p_subpicture->updater;
    subpicture_private_t *p_private = p_subpicture->p_private;

    if( !p_upd->pf_validate )
        return;
    if( !p_upd->pf_validate( p_subpicture,
                          !video_format_IsSimilar( p_fmt_src,
                                                   &p_private->src ), p_fmt_src,
                          !video_format_IsSimilar( p_fmt_dst,
                                                   &p_private->dst ), p_fmt_dst,
                          i_ts ) )
        return;

    subpicture_region_ChainDelete( p_subpicture->p_region );
    p_subpicture->p_region = NULL;

    p_upd->pf_update( p_subpicture, p_fmt_src, p_fmt_dst, i_ts );

    video_format_Clean( &p_private->src );
    video_format_Clean( &p_private->dst );

    video_format_Copy( &p_private->src, p_fmt_src );
    video_format_Copy( &p_private->dst, p_fmt_dst );
}
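
For reference, subpicture_Update() treats the two boolean arguments of pf_validate as "source/destination format changed" hints computed with video_format_IsSimilar(), and only rebuilds the regions through pf_update when pf_validate returns non-zero. The following callback is a minimal, hypothetical sketch derived solely from that call site (MyValidate and its trivial body are not from any of the projects listed):

#include <vlc_common.h>
#include <vlc_subpicture.h>

static int MyValidate( subpicture_t *p_subpic,
                       bool b_src_changed, const video_format_t *p_fmt_src,
                       bool b_dst_changed, const video_format_t *p_fmt_dst,
                       mtime_t i_ts )
{
    /* Hypothetical callback; the parameter order mirrors the pf_validate
     * call in subpicture_Update() above. */
    VLC_UNUSED(p_subpic); VLC_UNUSED(p_fmt_src); VLC_UNUSED(p_fmt_dst);
    VLC_UNUSED(i_ts);

    if( !b_src_changed && !b_dst_changed )
        return 0;   /* zero: caller returns early, existing regions stay valid */
    return 1;       /* non-zero: caller deletes the regions and calls pf_update */
}
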
Code example #2
File: deinterlace.c  Project: etix/vlc
static int Open(vlc_object_t *obj)
{
    filter_t *filter = (filter_t *)obj;

    if (filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_420
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_422
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_444)
        return VLC_EGENERIC;
    if (!video_format_IsSimilar(&filter->fmt_in.video, &filter->fmt_out.video))
        return VLC_EGENERIC;

    filter_sys_t *sys = malloc(sizeof (*sys));
    if (unlikely(sys == NULL))
        return VLC_ENOMEM;

    /* NOTE: Only weave and bob are mandatory for the hardware to implement.
     * The other modes and IVTC should be checked. */

    sys->last_pts = VLC_TS_INVALID;

    filter->pf_video_filter = Deinterlace;
    filter->p_sys = sys;
    filter->fmt_out.video.i_frame_rate *= 2;
    return VLC_SUCCESS;
}
Code example #3
File: sharpen.c  Project: mstorsjo/vlc
static int Open(vlc_object_t *obj)
{
    filter_t *filter = (filter_t *)obj;

    if (filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_420
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_422
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_444)
        return VLC_EGENERIC;
    if (!video_format_IsSimilar(&filter->fmt_in.video, &filter->fmt_out.video))
        return VLC_EGENERIC;

    /* Check for sharpen support */
    vdp_t *vdp;
    VdpDevice device;
    VdpStatus err;
    VdpBool ok;

    err = vdp_get_x11(NULL, -1, &vdp, &device);
    if (err != VDP_STATUS_OK)
        return VLC_EGENERIC; /* Weird. The decoder should be active... */

    err = vdp_video_mixer_query_feature_support(vdp, device,
                                       VDP_VIDEO_MIXER_FEATURE_SHARPNESS, &ok);
    if (err != VDP_STATUS_OK)
        ok = VDP_FALSE;
    vdp_release_x11(vdp);

    if (ok != VDP_TRUE)
    {
        msg_Err(filter, "sharpening/blurring not supported by VDPAU device");
        return VLC_EGENERIC;
    }

    /* Initialization */
    filter_sys_t *sys = malloc(sizeof (*sys));
    if (unlikely(sys == NULL))
        return VLC_ENOMEM;

    filter->pf_video_filter = Sharpen;
    filter->p_sys = sys;

    config_ChainParse(filter, "sharpen-", options, filter->p_cfg);
    var_AddCallback(filter, "sharpen-sigma", SharpenCallback, sys);

    union { uint32_t u; float f; } u;
    u.f = vlc_to_vdp_sigma(var_CreateGetFloatCommand(filter, "sharpen-sigma"));
    atomic_init(&sys->sigma, u.u);

    return VLC_SUCCESS;
}
Code example #4
File: adjust.c  Project: etix/vlc
static int Open(vlc_object_t *obj)
{
    filter_t *filter = (filter_t *)obj;

    if (filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_420
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_422
     && filter->fmt_in.video.i_chroma != VLC_CODEC_VDPAU_VIDEO_444)
        return VLC_EGENERIC;
    if (!video_format_IsSimilar(&filter->fmt_in.video, &filter->fmt_out.video))
        return VLC_EGENERIC;

    filter_sys_t *sys = malloc(sizeof (*sys));
    if (unlikely(sys == NULL))
        return VLC_ENOMEM;

    filter->pf_video_filter = Adjust;
    filter->p_sys = sys;

    config_ChainParse(filter, "", options, filter->p_cfg);

    float f;
    int i;

    f = var_CreateGetFloatCommand(filter, "brightness");
    var_AddCallback(filter, "brightness", BrightnessCallback,
                    &sys->brightness);
    vlc_atomic_init_float(&sys->brightness, vlc_to_vdp_brightness(f));

    f = var_CreateGetFloatCommand(filter, "contrast");
    var_AddCallback(filter, "contrast", ContrastCallback, &sys->contrast);
    vlc_atomic_init_float(&sys->contrast, vlc_to_vdp_contrast(f));

    f = var_CreateGetFloatCommand(filter, "saturation");
    var_AddCallback(filter, "saturation", SaturationCallback,
                    &sys->saturation);
    vlc_atomic_init_float(&sys->saturation, vlc_to_vdp_saturation(f));

    i = var_CreateGetFloatCommand(filter, "hue");
    var_AddCallback(filter, "hue", HueCallback, &sys->hue);
    vlc_atomic_init_float(&sys->hue, vlc_to_vdp_hue(i));

    return VLC_SUCCESS;
}
Code example #5
File: es_format.c  Project: CSRedRat/vlc
bool es_format_IsSimilar( const es_format_t *p_fmt1, const es_format_t *p_fmt2 )
{
    if( p_fmt1->i_cat != p_fmt2->i_cat ||
        vlc_fourcc_GetCodec( p_fmt1->i_cat, p_fmt1->i_codec ) !=
        vlc_fourcc_GetCodec( p_fmt2->i_cat, p_fmt2->i_codec ) )
        return false;

    switch( p_fmt1->i_cat )
    {
    case AUDIO_ES:
    {
        audio_format_t a1 = p_fmt1->audio;
        audio_format_t a2 = p_fmt2->audio;

        if( a1.i_format && a2.i_format && a1.i_format != a2.i_format )
            return false;
        if( a1.i_rate != a2.i_rate ||
            a1.i_physical_channels != a2.i_physical_channels ||
            a1.i_original_channels != a2.i_original_channels )
            return false;
        return true;
    }

    case VIDEO_ES:
    {
        video_format_t v1 = p_fmt1->video;
        video_format_t v2 = p_fmt2->video;
        if( !v1.i_chroma )
            v1.i_chroma = vlc_fourcc_GetCodec( p_fmt1->i_cat, p_fmt1->i_codec );
        if( !v2.i_chroma )
            v2.i_chroma = vlc_fourcc_GetCodec( p_fmt2->i_cat, p_fmt2->i_codec );
        return video_format_IsSimilar( &v1, &v2 );
    }

    case SPU_ES:
    default:
        return true;
    }
}
Code example #6
static int Init( filter_t *p_filter )
{
    filter_sys_t *p_sys = p_filter->p_sys;
    const video_format_t *p_fmti = &p_filter->fmt_in.video;
    video_format_t       *p_fmto = &p_filter->fmt_out.video;

    if( p_fmti->orientation != p_fmto->orientation )
        return VLC_EGENERIC;

    if( video_format_IsSimilar( p_fmti, &p_sys->fmt_in ) &&
        video_format_IsSimilar( p_fmto, &p_sys->fmt_out ) &&
        p_sys->ctx )
    {
        return VLC_SUCCESS;
    }

    Clean( p_filter );

    /* Init with new parameters */
    ScalerConfiguration cfg;
    if( GetParameters( &cfg, p_fmti, p_fmto, p_sys->i_sws_flags ) )
    {
        msg_Err( p_filter, "format not supported" );
        return VLC_EGENERIC;
    }
    if( p_fmti->i_visible_width <= 0 || p_fmti->i_visible_height <= 0 ||
        p_fmto->i_visible_width <= 0 || p_fmto->i_visible_height <= 0 )
    {
        msg_Err( p_filter, "invalid scaling: %ix%i -> %ix%i",
                 p_fmti->i_visible_width, p_fmti->i_visible_height,
                 p_fmto->i_visible_width, p_fmto->i_visible_height);
        return VLC_EGENERIC;
    }

    p_sys->desc_in = vlc_fourcc_GetChromaDescription( p_fmti->i_chroma );
    p_sys->desc_out = vlc_fourcc_GetChromaDescription( p_fmto->i_chroma );
    if( p_sys->desc_in == NULL || p_sys->desc_out == NULL )
        return VLC_EGENERIC;

    /* swscale does not like too small width */
    p_sys->i_extend_factor = 1;
    while( __MIN( p_fmti->i_visible_width, p_fmto->i_visible_width ) * p_sys->i_extend_factor < MINIMUM_WIDTH)
        p_sys->i_extend_factor++;

    const unsigned i_fmti_visible_width = p_fmti->i_visible_width * p_sys->i_extend_factor;
    const unsigned i_fmto_visible_width = p_fmto->i_visible_width * p_sys->i_extend_factor;
    for( int n = 0; n < (cfg.b_has_a ? 2 : 1); n++ )
    {
        const int i_fmti = n == 0 ? cfg.i_fmti : PIX_FMT_GRAY8;
        const int i_fmto = n == 0 ? cfg.i_fmto : PIX_FMT_GRAY8;
        struct SwsContext *ctx;

        ctx = sws_getContext( i_fmti_visible_width, p_fmti->i_visible_height, i_fmti,
                              i_fmto_visible_width, p_fmto->i_visible_height, i_fmto,
                              cfg.i_sws_flags | p_sys->i_cpu_mask,
                              p_sys->p_src_filter, p_sys->p_dst_filter, 0 );
        if( n == 0 )
            p_sys->ctx = ctx;
        else
            p_sys->ctxA = ctx;
    }
    if( p_sys->ctxA )
    {
        p_sys->p_src_a = picture_New( VLC_CODEC_GREY, i_fmti_visible_width, p_fmti->i_visible_height, 0, 1 );
        p_sys->p_dst_a = picture_New( VLC_CODEC_GREY, i_fmto_visible_width, p_fmto->i_visible_height, 0, 1 );
    }
    if( p_sys->i_extend_factor != 1 )
    {
        p_sys->p_src_e = picture_New( p_fmti->i_chroma, i_fmti_visible_width, p_fmti->i_visible_height, 0, 1 );
        p_sys->p_dst_e = picture_New( p_fmto->i_chroma, i_fmto_visible_width, p_fmto->i_visible_height, 0, 1 );

        if( p_sys->p_src_e )
            memset( p_sys->p_src_e->p[0].p_pixels, 0, p_sys->p_src_e->p[0].i_pitch * p_sys->p_src_e->p[0].i_lines );
        if( p_sys->p_dst_e )
            memset( p_sys->p_dst_e->p[0].p_pixels, 0, p_sys->p_dst_e->p[0].i_pitch * p_sys->p_dst_e->p[0].i_lines );
    }

    if( !p_sys->ctx ||
        ( cfg.b_has_a && ( !p_sys->ctxA || !p_sys->p_src_a || !p_sys->p_dst_a ) ) ||
        ( p_sys->i_extend_factor != 1 && ( !p_sys->p_src_e || !p_sys->p_dst_e ) ) )
    {
        msg_Err( p_filter, "could not init SwScaler and/or allocate memory" );
        Clean( p_filter );
        return VLC_EGENERIC;
    }

    if (p_filter->b_allow_fmt_out_change)
    {
        /*
         * If the transformation is not homothetic we must modify the
         * aspect ratio of the output format in order to have the
         * output picture displayed correctly and not stretched
         * horizontally or vertically.
         * WARNING: this is a hack, ideally this should not be needed
         * and the vout should update its video format instead.
         */
        unsigned i_sar_num = p_fmti->i_sar_num * p_fmti->i_visible_width;
        unsigned i_sar_den = p_fmti->i_sar_den * p_fmto->i_visible_width;
        vlc_ureduce(&i_sar_num, &i_sar_den, i_sar_num, i_sar_den, 65536);
        i_sar_num *= p_fmto->i_visible_height;
        i_sar_den *= p_fmti->i_visible_height;
        vlc_ureduce(&i_sar_num, &i_sar_den, i_sar_num, i_sar_den, 65536);
        p_fmto->i_sar_num = i_sar_num;
        p_fmto->i_sar_den = i_sar_den;
    }

    p_sys->b_add_a = cfg.b_add_a;
    p_sys->b_copy = cfg.b_copy;
    p_sys->fmt_in  = *p_fmti;
    p_sys->fmt_out = *p_fmto;
    p_sys->b_swap_uvi = cfg.b_swap_uvi;
    p_sys->b_swap_uvo = cfg.b_swap_uvo;

#if 0
    msg_Dbg( p_filter, "%ix%i (%ix%i) chroma: %4.4s -> %ix%i (%ix%i) chroma: %4.4s extend by %d",
             p_fmti->i_visible_width, p_fmti->i_visible_height, p_fmti->i_width, p_fmti->i_height, (char *)&p_fmti->i_chroma,
             p_fmto->i_visible_width, p_fmto->i_visible_height, p_fmto->i_width, p_fmto->i_height, (char *)&p_fmto->i_chroma,
             p_sys->i_extend_factor );
#endif
    return VLC_SUCCESS;
}
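
This Init(), like the transcode_video_process() variants further down, follows a cache-and-compare idiom: keep a copy of the last accepted format and rebuild expensive state only when video_format_IsSimilar() reports a change. Below is a minimal sketch of that idiom under stated assumptions, not taken from any of the listed projects; my_sys_t and MyReconfigure() are hypothetical, while video_format_IsSimilar(), video_format_Clean() and video_format_Copy() are the VLC helpers also used in example #1.

#include <vlc_common.h>
#include <vlc_es.h>   /* video_format_t, video_format_IsSimilar() */

typedef struct
{
    video_format_t cached_fmt;  /* last format the state was built for */
    bool           b_valid;     /* has any state been built yet? */
} my_sys_t;                     /* hypothetical, not a VLC type */

/* p_sys is assumed to start zero-initialized (e.g. from calloc()). */
static int MyReconfigure( my_sys_t *p_sys, const video_format_t *p_fmt )
{
    /* Fast path: nothing relevant changed, keep the existing state. */
    if( p_sys->b_valid && video_format_IsSimilar( p_fmt, &p_sys->cached_fmt ) )
        return VLC_SUCCESS;

    /* ...tear down and rebuild scaler/encoder/filter-chain state here... */

    /* Remember the format the new state was built for. */
    video_format_Clean( &p_sys->cached_fmt );
    video_format_Copy( &p_sys->cached_fmt, p_fmt );
    p_sys->b_valid = true;
    return VLC_SUCCESS;
}
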
Code example #7
File: video.c  Project: AsamQi/vlc
int transcode_video_process( sout_stream_t *p_stream, sout_stream_id_t *id,
                                    block_t *in, block_t **out )
{
    sout_stream_sys_t *p_sys = p_stream->p_sys;
    bool b_need_duplicate = false;
    picture_t *p_pic;
    *out = NULL;

    if( unlikely( in == NULL ) )
    {
        if( p_sys->i_threads == 0 )
        {
            block_t *p_block;
            do {
                p_block = id->p_encoder->pf_encode_video(id->p_encoder, NULL );
                block_ChainAppend( out, p_block );
            } while( p_block );
        }
        else
        {
            /*
             * FIXME: we need EncoderThread() to flush buffers and signal us
             * when it's done so we can send the last frames to the chain
             */
        }
        return VLC_SUCCESS;
    }


    while( (p_pic = id->p_decoder->pf_decode_video( id->p_decoder, &in )) )
    {

        if( p_stream->p_sout->i_out_pace_nocontrol && p_sys->b_hurry_up )
        {
            mtime_t current_date = mdate();
            if( unlikely( current_date + 50000 > p_pic->date ) )
            {
                msg_Dbg( p_stream, "late picture skipped (%"PRId64")",
                         current_date + 50000 - p_pic->date );
                picture_Release( p_pic );
                continue;
            }
        }

        if( p_sys->b_master_sync )
        {
            mtime_t i_master_drift = p_sys->i_master_drift;
            mtime_t i_pts = date_Get( &id->interpolated_pts ) + 1;
            mtime_t i_video_drift = p_pic->date - i_pts;

            if ( unlikely( i_video_drift > MASTER_SYNC_MAX_DRIFT
                  || i_video_drift < -MASTER_SYNC_MAX_DRIFT ) )
            {
                msg_Dbg( p_stream,
                    "drift is too high (%"PRId64", resetting master sync",
                    i_video_drift );
                date_Set( &id->interpolated_pts, p_pic->date );
                i_pts = p_pic->date + 1;
            }
            i_video_drift = p_pic->date - i_pts;
            b_need_duplicate = false;

            /* Set the pts of the frame being encoded */
            p_pic->date = i_pts;

            if( unlikely( i_video_drift < (i_master_drift - 50000) ) )
            {
#if 0
                msg_Dbg( p_stream, "dropping frame (%i)",
                         (int)(i_video_drift - i_master_drift) );
#endif
                picture_Release( p_pic );
                continue;
            }
            else if( unlikely( i_video_drift > (i_master_drift + 50000) ) )
            {
#if 0
                msg_Dbg( p_stream, "adding frame (%i)",
                         (int)(i_video_drift - i_master_drift) );
#endif
                b_need_duplicate = true;
            }
        }
        if( unlikely (
             id->p_encoder->p_module &&
             !video_format_IsSimilar( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video )
            )
          )
        {
            msg_Info( p_stream, "aspect-ratio changed, reiniting. %i -> %i : %i -> %i.",
                        p_sys->fmt_input_video.i_sar_num, id->p_decoder->fmt_out.video.i_sar_num,
                        p_sys->fmt_input_video.i_sar_den, id->p_decoder->fmt_out.video.i_sar_den
                    );
            /* Close filters */
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            id->p_f_chain = NULL;
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_uf_chain = NULL;

            /* Reinitialize filters */
            id->p_encoder->fmt_out.video.i_width  = p_sys->i_width & ~1;
            id->p_encoder->fmt_out.video.i_height = p_sys->i_height & ~1;
            id->p_encoder->fmt_out.video.i_sar_num = id->p_encoder->fmt_out.video.i_sar_den = 0;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));
        }


        if( unlikely( !id->p_encoder->p_module ) )
        {
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_f_chain = id->p_uf_chain = NULL;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));

            if( transcode_video_encoder_open( p_stream, id ) != VLC_SUCCESS )
            {
                picture_Release( p_pic );
                transcode_video_close( p_stream, id );
                id->b_transcode = false;
                return VLC_EGENERIC;
            }
        }

        /* Run the filter and output chains; first with the picture,
         * and then with NULL as many times as we need until they
         * stop outputting frames.
         */
        for ( ;; ) {
            picture_t *p_filtered_pic = p_pic;

            /* Run filter chain */
            if( id->p_f_chain )
                p_filtered_pic = filter_chain_VideoFilter( id->p_f_chain, p_filtered_pic );
            if( !p_filtered_pic )
                break;

            for ( ;; ) {
                picture_t *p_user_filtered_pic = p_filtered_pic;

                /* Run user specified filter chain */
                if( id->p_uf_chain )
                    p_user_filtered_pic = filter_chain_VideoFilter( id->p_uf_chain, p_user_filtered_pic );
                if( !p_user_filtered_pic )
                    break;

                OutputFrame( p_sys, p_user_filtered_pic, b_need_duplicate, p_stream, id, out );
                b_need_duplicate = false;

                p_filtered_pic = NULL;
            }

            p_pic = NULL;
        }
    }

    if( p_sys->i_threads >= 1 )
    {
        /* Pick up any return data the encoder thread wants to output. */
        vlc_mutex_lock( &p_sys->lock_out );
        *out = p_sys->p_buffers;
        p_sys->p_buffers = NULL;
        vlc_mutex_unlock( &p_sys->lock_out );
    }

    return VLC_SUCCESS;
}
Code example #8
File: video.c  Project: chucolin/vlc
int transcode_video_process( sout_stream_t *p_stream, sout_stream_id_t *id,
                                    block_t *in, block_t **out )
{
    sout_stream_sys_t *p_sys = p_stream->p_sys;
    picture_t *p_pic = NULL;
    *out = NULL;

    if( unlikely( in == NULL ) )
    {
        if( p_sys->i_threads == 0 )
        {
            block_t *p_block;
            do {
                p_block = id->p_encoder->pf_encode_video(id->p_encoder, NULL );
                block_ChainAppend( out, p_block );
            } while( p_block );
        }
        else
        {
            msg_Dbg( p_stream, "Flushing thread and waiting that");
            vlc_mutex_lock( &p_stream->p_sys->lock_out );
            p_stream->p_sys->b_abort = true;
            vlc_cond_signal( &p_stream->p_sys->cond );
            vlc_mutex_unlock( &p_stream->p_sys->lock_out );

            vlc_join( p_stream->p_sys->thread, NULL );
            vlc_mutex_lock( &p_sys->lock_out );
            *out = p_sys->p_buffers;
            p_sys->p_buffers = NULL;
            vlc_mutex_unlock( &p_sys->lock_out );

            msg_Dbg( p_stream, "Flushing done");
        }
        return VLC_SUCCESS;
    }


    while( (p_pic = id->p_decoder->pf_decode_video( id->p_decoder, &in )) )
    {

        if( p_stream->p_sout->i_out_pace_nocontrol && p_sys->b_hurry_up )
        {
            mtime_t current_date = mdate();
            if( unlikely( (current_date - 50000) > p_pic->date ) )
            {
                msg_Dbg( p_stream, "late picture skipped (%"PRId64")",
                         current_date - 50000 - p_pic->date );
                picture_Release( p_pic );
                continue;
            }
        }

        if( unlikely (
             id->p_encoder->p_module &&
             !video_format_IsSimilar( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video )
            )
          )
        {
            msg_Info( p_stream, "aspect-ratio changed, reiniting. %i -> %i : %i -> %i.",
                        p_sys->fmt_input_video.i_sar_num, id->p_decoder->fmt_out.video.i_sar_num,
                        p_sys->fmt_input_video.i_sar_den, id->p_decoder->fmt_out.video.i_sar_den
                    );
            /* Close filters */
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            id->p_f_chain = NULL;
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_uf_chain = NULL;

            /* Reinitialize filters */
            id->p_encoder->fmt_out.video.i_visible_width  = p_sys->i_width & ~1;
            id->p_encoder->fmt_out.video.i_visible_height = p_sys->i_height & ~1;
            id->p_encoder->fmt_out.video.i_sar_num = id->p_encoder->fmt_out.video.i_sar_den = 0;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));
        }


        if( unlikely( !id->p_encoder->p_module ) )
        {
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_f_chain = id->p_uf_chain = NULL;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));

            if( transcode_video_encoder_open( p_stream, id ) != VLC_SUCCESS )
            {
                picture_Release( p_pic );
                transcode_video_close( p_stream, id );
                id->b_transcode = false;
                return VLC_EGENERIC;
            }
        }

        /*Input lipsync and drop check */
        if( p_sys->b_master_sync )
        {
            /* How much audio has drifted */
            mtime_t i_master_drift = p_sys->i_master_drift;

            /* This is the pts input should have now with constant frame rate */
            mtime_t i_pts = date_Get( &id->interpolated_pts );

            /* How much video pts is ahead of calculated pts */
            mtime_t i_video_drift = p_pic->date - i_pts;

            /* Check that we are having lipsync with input here */
            if( unlikely ( ( (i_video_drift - i_master_drift ) > MASTER_SYNC_MAX_DRIFT
                          || (i_video_drift + i_master_drift ) < -MASTER_SYNC_MAX_DRIFT ) ) )
            {
                msg_Warn( p_stream,
                    "video drift too big, resetting sync %"PRId64" to %"PRId64,
                    (i_video_drift + i_master_drift),
                    p_pic->date
                    );
                date_Set( &id->interpolated_pts, p_pic->date );
                date_Set( &id->next_output_pts, p_pic->date );
                i_pts = date_Get( &id->interpolated_pts );
            }

            /* Set the pts of the frame being encoded */
            p_pic->date = i_pts;

            /* now take next input pts, pts dates are only enabled if p_module is set*/
            date_Increment( &id->interpolated_pts, id->p_decoder->fmt_out.video.i_frame_rate_base );


            /* If input pts + input_frame_interval is lower than next_output_pts - output_frame_interval
             * Then the future input frame should fit better and we can drop this one 
             *
             * Duplication need is checked in OutputFrame */
            if( ( p_pic->date + (mtime_t)id->i_input_frame_interval ) <
                ( date_Get( &id->next_output_pts ) ) )
            {
#if 0
                msg_Dbg( p_stream, "dropping frame (%"PRId64" + %"PRId64" vs %"PRId64")",
                         p_pic->date, id->i_input_frame_interval, date_Get(&id->next_output_pts) );
#endif
                picture_Release( p_pic );
                continue;
            }
#if 0
            msg_Dbg( p_stream, "not dropping frame");
#endif

            /* input calculated pts isn't necessary what pts output should be, so use output pts*/
            p_pic->date = date_Get( &id->next_output_pts );


        }

        /* Run the filter and output chains; first with the picture,
         * and then with NULL as many times as we need until they
         * stop outputting frames.
         */
        for ( ;; ) {
            picture_t *p_filtered_pic = p_pic;

            /* Run filter chain */
            if( id->p_f_chain )
                p_filtered_pic = filter_chain_VideoFilter( id->p_f_chain, p_filtered_pic );
            if( !p_filtered_pic )
                break;

            for ( ;; ) {
                picture_t *p_user_filtered_pic = p_filtered_pic;

                /* Run user specified filter chain */
                if( id->p_uf_chain )
                    p_user_filtered_pic = filter_chain_VideoFilter( id->p_uf_chain, p_user_filtered_pic );
                if( !p_user_filtered_pic )
                    break;

                OutputFrame( p_sys, p_user_filtered_pic, p_stream, id, out );

                p_filtered_pic = NULL;
            }

            p_pic = NULL;
        }
    }

    if( p_sys->i_threads >= 1 )
    {
        /* Pick up any return data the encoder thread wants to output. */
        vlc_mutex_lock( &p_sys->lock_out );
        *out = p_sys->p_buffers;
        p_sys->p_buffers = NULL;
        vlc_mutex_unlock( &p_sys->lock_out );
    }

    return VLC_SUCCESS;
}
Code example #9
File: video.c  Project: 5UN5H1N3/vlc
int transcode_video_process( sout_stream_t *p_stream, sout_stream_id_sys_t *id,
                                    block_t *in, block_t **out )
{
    sout_stream_sys_t *p_sys = p_stream->p_sys;
    picture_t *p_pic = NULL;
    *out = NULL;

    if( unlikely( in == NULL ) )
    {
        if( p_sys->i_threads == 0 )
        {
            block_t *p_block;
            do {
                p_block = id->p_encoder->pf_encode_video(id->p_encoder, NULL );
                block_ChainAppend( out, p_block );
            } while( p_block );
        }
        else
        {
            msg_Dbg( p_stream, "Flushing thread and waiting that");
            vlc_mutex_lock( &p_stream->p_sys->lock_out );
            p_stream->p_sys->b_abort = true;
            vlc_cond_signal( &p_stream->p_sys->cond );
            vlc_mutex_unlock( &p_stream->p_sys->lock_out );

            vlc_join( p_stream->p_sys->thread, NULL );
            vlc_mutex_lock( &p_sys->lock_out );
            *out = p_sys->p_buffers;
            p_sys->p_buffers = NULL;
            vlc_mutex_unlock( &p_sys->lock_out );

            msg_Dbg( p_stream, "Flushing done");
        }
        return VLC_SUCCESS;
    }


    while( (p_pic = id->p_decoder->pf_decode_video( id->p_decoder, &in )) )
    {

        if( unlikely (
             id->p_encoder->p_module &&
             !video_format_IsSimilar( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video )
            )
          )
        {
            msg_Info( p_stream, "aspect-ratio changed, reiniting. %i -> %i : %i -> %i.",
                        p_sys->fmt_input_video.i_sar_num, id->p_decoder->fmt_out.video.i_sar_num,
                        p_sys->fmt_input_video.i_sar_den, id->p_decoder->fmt_out.video.i_sar_den
                    );
            /* Close filters */
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            id->p_f_chain = NULL;
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_uf_chain = NULL;

            /* Reinitialize filters */
            id->p_encoder->fmt_out.video.i_visible_width  = p_sys->i_width & ~1;
            id->p_encoder->fmt_out.video.i_visible_height = p_sys->i_height & ~1;
            id->p_encoder->fmt_out.video.i_sar_num = id->p_encoder->fmt_out.video.i_sar_den = 0;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));
        }


        if( unlikely( !id->p_encoder->p_module ) )
        {
            if( id->p_f_chain )
                filter_chain_Delete( id->p_f_chain );
            if( id->p_uf_chain )
                filter_chain_Delete( id->p_uf_chain );
            id->p_f_chain = id->p_uf_chain = NULL;

            transcode_video_filter_init( p_stream, id );
            transcode_video_encoder_init( p_stream, id );
            conversion_video_filter_append( id );
            memcpy( &p_sys->fmt_input_video, &id->p_decoder->fmt_out.video, sizeof(video_format_t));

            if( transcode_video_encoder_open( p_stream, id ) != VLC_SUCCESS )
            {
                picture_Release( p_pic );
                transcode_video_close( p_stream, id );
                id->b_transcode = false;
                return VLC_EGENERIC;
            }
            date_Set( &id->next_output_pts, p_pic->date );
        }

        /*Input lipsync and drop check */
        if( p_sys->b_master_sync )
        {
            /* If input pts lower than next_output_pts - output_frame_interval
             * Then the future input frame should fit better and we can drop this one 
             *
             * We check this here as we don't need to run video filter at all for pictures
             * we are going to drop anyway
             *
             * Duplication need is checked in OutputFrame */
            if( ( p_pic->date ) <
                ( date_Get( &id->next_output_pts ) - (mtime_t)id->i_output_frame_interval ) )
            {
#if 0
                msg_Dbg( p_stream, "dropping frame (%"PRId64" + %"PRId64" vs %"PRId64")",
                         p_pic->date, id->i_input_frame_interval, date_Get(&id->next_output_pts) );
#endif
                picture_Release( p_pic );
                continue;
            }
#if 0
            msg_Dbg( p_stream, "not dropping frame");
#endif

        }

        /* Run the filter and output chains; first with the picture,
         * and then with NULL as many times as we need until they
         * stop outputting frames.
         */
        for ( ;; ) {
            picture_t *p_filtered_pic = p_pic;

            /* Run filter chain */
            if( id->p_f_chain )
                p_filtered_pic = filter_chain_VideoFilter( id->p_f_chain, p_filtered_pic );
            if( !p_filtered_pic )
                break;

            for ( ;; ) {
                picture_t *p_user_filtered_pic = p_filtered_pic;

                /* Run user specified filter chain */
                if( id->p_uf_chain )
                    p_user_filtered_pic = filter_chain_VideoFilter( id->p_uf_chain, p_user_filtered_pic );
                if( !p_user_filtered_pic )
                    break;

                OutputFrame( p_stream, p_user_filtered_pic, id, out );

                p_filtered_pic = NULL;
            }

            p_pic = NULL;
        }
    }

    if( p_sys->i_threads >= 1 )
    {
        /* Pick up any return data the encoder thread wants to output. */
        vlc_mutex_lock( &p_sys->lock_out );
        *out = p_sys->p_buffers;
        p_sys->p_buffers = NULL;
        vlc_mutex_unlock( &p_sys->lock_out );
    }

    return VLC_SUCCESS;
}
Code example #10
File: dxva2_deinterlace.c  Project: IAPark/vlc
static int Open(vlc_object_t *obj)
{
    filter_t *filter = (filter_t *)obj;
    filter_sys_t *sys = NULL;
    HINSTANCE hdecoder_dll = NULL;
    HINSTANCE d3d9_dll = NULL;
    HRESULT hr;
    picture_t *dst = NULL;
    GUID *processorGUIDs = NULL;
    GUID *processorGUID = NULL;
    IDirectXVideoProcessorService *processor = NULL;

    if (filter->fmt_in.video.i_chroma != VLC_CODEC_D3D9_OPAQUE
     && filter->fmt_in.video.i_chroma != VLC_CODEC_D3D9_OPAQUE_10B)
        return VLC_EGENERIC;
    if (!video_format_IsSimilar(&filter->fmt_in.video, &filter->fmt_out.video))
        return VLC_EGENERIC;

    d3d9_dll = LoadLibrary(TEXT("D3D9.DLL"));
    if (!d3d9_dll)
        goto error;

    hdecoder_dll = LoadLibrary(TEXT("DXVA2.DLL"));
    if (!hdecoder_dll)
        goto error;

    dst = filter_NewPicture(filter);
    if (dst == NULL)
        goto error;

    if (!dst->p_sys)
    {
        msg_Dbg(filter, "D3D9 opaque without a texture");
        goto error;
    }

    sys = calloc(1, sizeof (*sys));
    if (unlikely(sys == NULL))
        goto error;

    HRESULT (WINAPI *CreateVideoService)(IDirect3DDevice9 *,
                                         REFIID riid,
                                         void **ppService);
    CreateVideoService =
      (void *)GetProcAddress(hdecoder_dll, "DXVA2CreateVideoService");
    if (CreateVideoService == NULL)
        goto error;

    hr = IDirect3DSurface9_GetDevice( dst->p_sys->surface, &sys->d3ddev );
    if (FAILED(hr))
        goto error;

    D3DSURFACE_DESC dstDesc;
    hr = IDirect3DSurface9_GetDesc( dst->p_sys->surface, &dstDesc );
    if (unlikely(FAILED(hr)))
        goto error;

    hr = CreateVideoService( sys->d3ddev, &IID_IDirectXVideoProcessorService,
                            (void**)&processor);
    if (FAILED(hr))
        goto error;

    DXVA2_VideoDesc dsc;
    ZeroMemory(&dsc, sizeof(dsc));
    dsc.SampleWidth     = dstDesc.Width;
    dsc.SampleHeight    = dstDesc.Height;
    dsc.Format          = dstDesc.Format;
    if (filter->fmt_in.video.i_frame_rate && filter->fmt_in.video.i_frame_rate_base) {
        dsc.InputSampleFreq.Numerator   = filter->fmt_in.video.i_frame_rate;
        dsc.InputSampleFreq.Denominator = filter->fmt_in.video.i_frame_rate_base;
    } else {
        dsc.InputSampleFreq.Numerator   = 0;
        dsc.InputSampleFreq.Denominator = 0;
    }
    dsc.OutputFrameFreq = dsc.InputSampleFreq;

    DXVA2_ExtendedFormat *pFormat = &dsc.SampleFormat;
    pFormat->SampleFormat = dst->b_top_field_first ?
                DXVA2_SampleFieldInterleavedEvenFirst :
                DXVA2_SampleFieldInterleavedOddFirst;

    UINT count = 0;
    hr = IDirectXVideoProcessorService_GetVideoProcessorDeviceGuids( processor,
                                                                &dsc,
                                                                &count,
                                                                &processorGUIDs);
    if (FAILED(hr))
        goto error;

    char *psz_mode = var_InheritString( filter, "deinterlace-mode" );
    const struct filter_mode_t *p_mode = GetFilterMode(psz_mode);
    if (p_mode == NULL)
    {
        msg_Dbg(filter, "unknown mode %s, trying blend", psz_mode);
        p_mode = GetFilterMode("blend");
    }
    if (strcmp(p_mode->psz_mode, psz_mode))
        msg_Dbg(filter, "using %s deinterlacing mode", p_mode->psz_mode);

    DXVA2_VideoProcessorCaps caps, best_caps;
    unsigned best_score = 0;
    for (UINT i=0; i<count; ++i) {
        hr = IDirectXVideoProcessorService_GetVideoProcessorCaps( processor,
                                                                  processorGUIDs+i,
                                                                  &dsc,
                                                                  dsc.Format,
                                                                  &caps);
        if ( FAILED(hr) || !caps.DeinterlaceTechnology )
            continue;

        unsigned score = (caps.DeinterlaceTechnology & p_mode->i_mode) ? 10 : 1;
        if (best_score < score) {
            best_score = score;
            best_caps = caps;
            processorGUID = processorGUIDs + i;
        }
    }

    if (processorGUID == NULL)
    {
        msg_Dbg(filter, "Could not find a filter to output the required format");
        goto error;
    }

    hr = IDirectXVideoProcessorService_CreateVideoProcessor( processor,
                                                             processorGUID,
                                                             &dsc,
                                                             dsc.Format,
                                                             1,
                                                             &sys->processor );
    if (FAILED(hr))
        goto error;

    hr = IDirectXVideoProcessorService_CreateSurface( processor,
                                                      dstDesc.Width,
                                                      dstDesc.Height,
                                                      0,
                                                      dstDesc.Format,
                                                      D3DPOOL_DEFAULT,
                                                      0,
                                                      DXVA2_VideoProcessorRenderTarget,
                                                      &sys->hw_surface,
                                                      NULL);
    if (FAILED(hr))
        goto error;

    sys->hdecoder_dll = hdecoder_dll;
    sys->d3d9_dll     = d3d9_dll;
    sys->decoder_caps = best_caps;

    InitDeinterlacingContext( &sys->context );

    sys->context.settings = p_mode->settings;
    sys->context.settings.b_use_frame_history = best_caps.NumBackwardRefSamples != 0 ||
                                       best_caps.NumForwardRefSamples  != 0;
    if (sys->context.settings.b_use_frame_history != p_mode->settings.b_use_frame_history)
        msg_Dbg( filter, "deinterlacing not using frame history as requested");
    if (sys->context.settings.b_double_rate)
        sys->context.pf_render_ordered = RenderPic;
    else
        sys->context.pf_render_single_pic = RenderSinglePic;

    video_format_t out_fmt;
    GetDeinterlacingOutput( &sys->context, &out_fmt, &filter->fmt_in.video );
    if( !filter->b_allow_fmt_out_change &&
         out_fmt.i_height != filter->fmt_in.video.i_height )
    {
       goto error;
    }

    CoTaskMemFree(processorGUIDs);
    IDirectXVideoProcessorService_Release(processor);
    picture_Release(dst);

    sys->buffer_new = filter->owner.video.buffer_new;
    filter->owner.video.buffer_new = NewOutputPicture;
    filter->fmt_out.video   = out_fmt;
    filter->pf_video_filter = Deinterlace;
    filter->pf_flush        = Flush;
    filter->p_sys = sys;

    return VLC_SUCCESS;
error:
    CoTaskMemFree(processorGUIDs);
    if (sys && sys->processor)
        IDirectXVideoProcessor_Release( sys->processor );
    if (processor)
        IDirectXVideoProcessorService_Release(processor);
    if (sys && sys->d3ddev)
        IDirect3DDevice9_Release( sys->d3ddev );
    if (hdecoder_dll)
        FreeLibrary(hdecoder_dll);
    if (d3d9_dll)
        FreeLibrary(d3d9_dll);
    if (dst)
        picture_Release(dst);
    free(sys);

    return VLC_EGENERIC;
}
Code example #11
File: swscale.c  Project: 371816210/vlc_vlc
static int Init( filter_t *p_filter )
{
    filter_sys_t *p_sys = p_filter->p_sys;
    const video_format_t *p_fmti = &p_filter->fmt_in.video;
    video_format_t       *p_fmto = &p_filter->fmt_out.video;

    if( p_fmti->orientation != p_fmto->orientation )
        return VLC_EGENERIC;

    if( video_format_IsSimilar( p_fmti, &p_sys->fmt_in ) &&
        video_format_IsSimilar( p_fmto, &p_sys->fmt_out ) &&
        p_sys->ctx )
    {
        return VLC_SUCCESS;
    }

    Clean( p_filter );

    /* Init with new parameters */
    ScalerConfiguration cfg;
    if( GetParameters( &cfg, p_fmti, p_fmto, p_sys->i_sws_flags ) )
    {
        msg_Err( p_filter, "format not supported" );
        return VLC_EGENERIC;
    }
    if( p_fmti->i_visible_width <= 0 || p_fmti->i_visible_height <= 0 ||
        p_fmto->i_visible_width <= 0 || p_fmto->i_visible_height <= 0 )
    {
        msg_Err( p_filter, "invalid scaling: %ix%i -> %ix%i",
                 p_fmti->i_visible_width, p_fmti->i_visible_height,
                 p_fmto->i_visible_width, p_fmto->i_visible_height);
        return VLC_EGENERIC;
    }

    /* swscale does not like too small width */
    p_sys->i_extend_factor = 1;
    while( __MIN( p_fmti->i_visible_width, p_fmto->i_visible_width ) * p_sys->i_extend_factor < MINIMUM_WIDTH)
        p_sys->i_extend_factor++;

    const unsigned i_fmti_visible_width = p_fmti->i_visible_width * p_sys->i_extend_factor;
    const unsigned i_fmto_visible_width = p_fmto->i_visible_width * p_sys->i_extend_factor;
    for( int n = 0; n < (cfg.b_has_a ? 2 : 1); n++ )
    {
        const int i_fmti = n == 0 ? cfg.i_fmti : PIX_FMT_GRAY8;
        const int i_fmto = n == 0 ? cfg.i_fmto : PIX_FMT_GRAY8;
        struct SwsContext *ctx;

        ctx = sws_getContext( i_fmti_visible_width, p_fmti->i_visible_height, i_fmti,
                              i_fmto_visible_width, p_fmto->i_visible_height, i_fmto,
                              cfg.i_sws_flags | p_sys->i_cpu_mask,
                              p_sys->p_src_filter, p_sys->p_dst_filter, 0 );
        if( n == 0 )
            p_sys->ctx = ctx;
        else
            p_sys->ctxA = ctx;
    }
    if( p_sys->ctxA )
    {
        p_sys->p_src_a = picture_New( VLC_CODEC_GREY, i_fmti_visible_width, p_fmti->i_visible_height, 0, 1 );
        p_sys->p_dst_a = picture_New( VLC_CODEC_GREY, i_fmto_visible_width, p_fmto->i_visible_height, 0, 1 );
    }
    if( p_sys->i_extend_factor != 1 )
    {
        p_sys->p_src_e = picture_New( p_fmti->i_chroma, i_fmti_visible_width, p_fmti->i_visible_height, 0, 1 );
        p_sys->p_dst_e = picture_New( p_fmto->i_chroma, i_fmto_visible_width, p_fmto->i_visible_height, 0, 1 );

        if( p_sys->p_src_e )
            memset( p_sys->p_src_e->p[0].p_pixels, 0, p_sys->p_src_e->p[0].i_pitch * p_sys->p_src_e->p[0].i_lines );
        if( p_sys->p_dst_e )
            memset( p_sys->p_dst_e->p[0].p_pixels, 0, p_sys->p_dst_e->p[0].i_pitch * p_sys->p_dst_e->p[0].i_lines );
    }

    if( !p_sys->ctx ||
        ( cfg.b_has_a && ( !p_sys->ctxA || !p_sys->p_src_a || !p_sys->p_dst_a ) ) ||
        ( p_sys->i_extend_factor != 1 && ( !p_sys->p_src_e || !p_sys->p_dst_e ) ) )
    {
        msg_Err( p_filter, "could not init SwScaler and/or allocate memory" );
        Clean( p_filter );
        return VLC_EGENERIC;
    }

    p_sys->b_add_a = cfg.b_add_a;
    p_sys->b_copy = cfg.b_copy;
    p_sys->fmt_in  = *p_fmti;
    p_sys->fmt_out = *p_fmto;
    p_sys->b_swap_uvi = cfg.b_swap_uvi;
    p_sys->b_swap_uvo = cfg.b_swap_uvo;

#if 0
    msg_Dbg( p_filter, "%ix%i (%ix%i) chroma: %4.4s -> %ix%i (%ix%i) chroma: %4.4s extend by %d",
             p_fmti->i_visible_width, p_fmti->i_visible_height, p_fmti->i_width, p_fmti->i_height, (char *)&p_fmti->i_chroma,
             p_fmto->i_visible_width, p_fmto->i_visible_height, p_fmto->i_width, p_fmto->i_height, (char *)&p_fmto->i_chroma,
             p_sys->i_extend_factor );
#endif
    return VLC_SUCCESS;
}