static int amc_queue_picture_buffer(
    IJKFF_Pipenode            *node,
    int                        output_buffer_index,
    int                        acodec_serial,
    SDL_AMediaCodecBufferInfo *buffer_info)
{
    IJKFF_Pipenode_Opaque *opaque     = node->opaque;
    FFPlayer              *ffp        = opaque->ffp;
    VideoState            *is         = ffp->is;
    AVFrame     picture;
    AVRational  tb         = is->video_st->time_base;
    AVRational  frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double      duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
    int64_t     amc_pts = av_rescale_q(buffer_info->presentationTimeUs, AV_TIME_BASE_Q, is->video_st->time_base);
    double      pts = amc_pts < 0 ? NAN : amc_pts * av_q2d(tb);

    memset(&picture, 0, sizeof(AVFrame));
    picture.opaque = SDL_VoutAndroid_obtainBufferProxy(opaque->weak_vout, acodec_serial, output_buffer_index);
    picture.width  = opaque->frame_width;
    picture.height = opaque->frame_height;
    picture.format = SDL_FCC__AMC;
    picture.sample_aspect_ratio = opaque->avctx->sample_aspect_ratio;

    int ret = ffp_queue_picture(opaque->ffp, &picture, pts, duration, 0, is->viddec.pkt_serial);
    if (ret) {
        if (picture.opaque && output_buffer_index >= 0)
            SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&picture.opaque, false);
    }
    return ret;
}
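The helper above queues a MediaCodec output buffer as a picture and passes a duration that is simply one frame period, computed from the guessed stream frame rate. A minimal stand-alone sketch of that computation (the name guessed_frame_duration is illustrative, not part of ijkplayer or FFmpeg):

#include <libavformat/avformat.h>
#include <libavutil/rational.h>

/* One frame period in seconds from the container/codec frame-rate guess,
 * or 0 when no usable rate is known. */
static double guessed_frame_duration(AVFormatContext *ic, AVStream *st)
{
    AVRational fr = av_guess_frame_rate(ic, st, NULL);
    return (fr.num && fr.den) ? av_q2d(av_inv_q(fr)) : 0;
}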
Example #2
sf::Time Stream::packetDuration(const AVPacket* packet) const
{
    CHECK(packet, "inconsistency error: null packet");
    CHECK(packet->stream_index == m_streamID, "Asking for duration of a packet for a different stream!");

    if (packet->duration != 0)
    {
        AVRational seconds = av_mul_q(av_make_q(packet->duration, 1), m_stream->time_base);
        return sf::seconds(av_q2d(seconds));
    }
    else
    {
        return sf::seconds(1. / av_q2d(av_guess_frame_rate(m_formatCtx, m_stream, nullptr)));
    }
}
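Stream::packetDuration() above prefers the packet's own duration and only falls back to one frame period from the guessed rate when the demuxer reports none. The same fallback written against the raw FFmpeg types might look like this (packet_duration_sec is an illustrative name, not an FFmpeg API):

#include <libavformat/avformat.h>
#include <libavutil/rational.h>

/* Packet duration in seconds: use pkt->duration when the demuxer provides it,
 * otherwise assume one frame period from the guessed stream frame rate. */
static double packet_duration_sec(AVFormatContext *ic, AVStream *st, const AVPacket *pkt)
{
    if (pkt->duration > 0)
        return pkt->duration * av_q2d(st->time_base);

    AVRational fr = av_guess_frame_rate(ic, st, NULL);
    return (fr.num && fr.den) ? av_q2d(av_inv_q(fr)) : 0;
}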
static int amc_queue_picture_buffer(
    IJKFF_Pipenode            *node,
    int                        output_buffer_index,
    SDL_AMediaCodecBufferInfo *buffer_info)
{
    IJKFF_Pipenode_Opaque *opaque     = node->opaque;
    FFPlayer              *ffp        = opaque->ffp;
    VideoState            *is         = ffp->is;
    AVRational             tb         = is->video_st->time_base;
    AVRational             frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);

    double duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
    int64_t amc_pts = av_rescale_q(buffer_info->presentationTimeUs, AV_TIME_BASE_Q, is->video_st->time_base);
    double pts = amc_pts < 0 ? NAN : amc_pts * av_q2d(tb);
    // ALOGE("got_frame: %lld -> %lf", bufferInfo.presentationTimeUs, pts);
    return amc_queue_picture(node, opaque->acodec, output_buffer_index, buffer_info, pts, duration, 0, is->viddec.pkt_serial);
}
static int amc_queue_picture_buffer(
    IJKFF_Pipenode            *node,
    int                        output_buffer_index,
    SDL_AMediaCodecBufferInfo *buffer_info)
{
    IJKFF_Pipenode_Opaque *opaque     = node->opaque;
    FFPlayer              *ffp        = opaque->ffp;
    VideoState            *is         = ffp->is;
    AVFrame     picture;
    AVRational  tb         = is->video_st->time_base;
    AVRational  frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double      duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
    int64_t     amc_pts = av_rescale_q(buffer_info->presentationTimeUs, AV_TIME_BASE_Q, is->video_st->time_base);
    double      pts = amc_pts < 0 ? NAN : amc_pts * av_q2d(tb);

    memset(&picture, 0, sizeof(AVFrame)); /* zero the stack frame so unset fields are not passed on uninitialised */
    picture.opaque = (void *)(intptr_t)output_buffer_index;
    picture.width  = opaque->frame_width;
    picture.height = opaque->frame_height;
    picture.format = SDL_FCC__AMC;
    picture.sample_aspect_ratio = opaque->avctx->sample_aspect_ratio;

    return ffp_queue_picture(opaque->ffp, &picture, pts, duration, 0, is->viddec.pkt_serial);
}
Example #5
int VideoDecoder::video_thread(void *arg)
{
    VideoState *is = (VideoState *) arg;
    AVStreamsParser* ps = is->getAVStreamsParser();
    AVFrame *frame = av_frame_alloc();
    double pts;
    double duration;
    int ret;
    AVRational tb = ps->video_st->time_base;
    AVRational frame_rate = av_guess_frame_rate(ps->ic, ps->video_st, NULL);

#if CONFIG_AVFILTER
    AVFilterGraph *graph = avfilter_graph_alloc();
    AVFilterContext *filt_out = NULL, *filt_in = NULL;
    int last_w = 0;
    int last_h = 0;
    enum AVPixelFormat last_format = (AVPixelFormat) (-2);
    int last_serial = -1;
    int last_vfilter_idx = 0;
    if (!graph) {
        av_frame_free(&frame);
        return AVERROR(ENOMEM);
    }

#endif

    if (!frame) {
#if CONFIG_AVFILTER
        avfilter_graph_free(&graph);
#endif
        return AVERROR(ENOMEM);
    }

    for (;;) {
        ret = is->viddec().get_video_frame(is, frame);
        if (ret < 0)
            goto the_end;
        if (!ret)
            continue;

#if CONFIG_AVFILTER
        if (   last_w != frame->width
            || last_h != frame->height
            || last_format != frame->format
            || last_serial != is->viddec().pkt_serial
            || last_vfilter_idx != is->vfilter_idx) {
            av_log(NULL, AV_LOG_DEBUG,
                   "Video frame changed from size:%dx%d format:%s serial:%d to size:%dx%d format:%s serial:%d\n",
                   last_w, last_h,
                   (const char *)av_x_if_null(av_get_pix_fmt_name(last_format), "none"), last_serial,
                   frame->width, frame->height,
                   (const char *)av_x_if_null(av_get_pix_fmt_name((AVPixelFormat)frame->format), "none"), is->viddec().pkt_serial);
            avfilter_graph_free(&graph);
            graph = avfilter_graph_alloc();
            if ((ret = configure_video_filters(graph, is,gOptions.vfilters_list ? gOptions.vfilters_list[is->vfilter_idx] : NULL, frame)) < 0) {
                SDL_Event event;
                event.type = FF_QUIT_EVENT;
                event.user.data1 = is;
                SDL_PushEvent(&event);
                goto the_end;
            }
            filt_in  = is->in_video_filter;
            filt_out = is->out_video_filter;
            last_w = frame->width;
            last_h = frame->height;
            last_format = (AVPixelFormat) frame->format;
            last_serial = is->viddec().pkt_serial;
            last_vfilter_idx = is->vfilter_idx;
            frame_rate = filt_out->inputs[0]->frame_rate;
        }

        ret = av_buffersrc_add_frame(filt_in, frame);
        if (ret < 0)
            goto the_end;

        while (ret >= 0) {
            is->frame_last_returned_time = av_gettime_relative() / 1000000.0;

            ret = av_buffersink_get_frame_flags(filt_out, frame, 0);
            if (ret < 0) {
                if (ret == AVERROR_EOF)
                    is->viddec().finished = is->viddec().pkt_serial;
                ret = 0;
                break;
            }

            is->frame_last_filter_delay = av_gettime_relative() / 1000000.0 - is->frame_last_returned_time;
            if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
                is->frame_last_filter_delay = 0;
            tb = filt_out->inputs[0]->time_base;
#endif
            duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            ret = queue_picture(is, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec().pkt_serial);
            av_frame_unref(frame);
#if CONFIG_AVFILTER
        }
#endif

        if (ret < 0)
            goto the_end;
    }
 the_end:
#if CONFIG_AVFILTER
    avfilter_graph_free(&graph);
#endif
    av_frame_free(&frame);
    return 0;
}
Example #6
static int decode_packet(int *got_frame, int cached)
{
    int decoded = pkt.size; // size in bytes
    double frame_duration = (double) video_stream->time_base.num / (double) video_stream->time_base.den;
    char buffer[1024];
    int length = 0;

    length = sprintf(buffer, ";");
    *got_frame = 0;

    if (pkt.stream_index == video_stream_idx) {
        int ret = avcodec_decode_video2(video_dec_ctx, frame, got_frame, &pkt);
        if (ret < 0) {
            fprintf(stderr, "Error decoding video frame (%s)\n", av_err2str(ret));
            return ret;
        }
        if (*got_frame)
        {
            switch(frame->key_frame)
            {
                case 1:
                    length+= sprintf(buffer + length, "I        QPEL;");
                    break;
                case 0:
                    length+= sprintf(buffer + length, "P        QPEL;");
                    break;
                default:
                    printf("Unknown picture type: %c\n", av_get_picture_type_char(frame->pict_type));
                    return decoded;
            }

            double time_base = fmod((pkt.dts * frame_duration), 1.0); 

            if (last_time_base > time_base) // wrap around
            {
                bitRateSum = 0;
            }

            bitRateSum += decoded * 8; // get bits
            totalBitSum += decoded * 8; // get bits

            last_time_base = time_base;
            int32_t timestamp = (pkt.dts * frame_duration);

            
            //printf("Total Average Bitrate: %f\n", totalBitSum / (pkt.dts * frame_duration) / 1000);
            AVRational rate = av_guess_frame_rate(fmt_ctx, video_stream, frame);

            length+= sprintf(buffer + length, "%d kbps;", (time_base == 0.0) ? (int) bitRateSum / 1000: (int)(bitRateSum / time_base / 1000));
            length+= sprintf(buffer + length, "%d;", av_frame_get_pkt_size(frame));
            length+= sprintf(buffer + length, "%d - %d;", (int32_t) (pkt.dts * frame_duration * 1000), (int32_t) ((pkt.dts + pkt.duration) * frame_duration * 1000));
            length+= sprintf(buffer + length, "%d;", frame->coded_picture_number);
            length+= sprintf(buffer + length, "%d ms;", (int) (pkt.duration * frame_duration * 1000));
            length+= sprintf(buffer + length, "%02d:%02d:%02d;", timestamp / 3600, (timestamp / 60) % 60, timestamp % 60);
            length+= sprintf(buffer + length, "%s;",src_filename);
            length+= sprintf(buffer + length, "decoderFrameRateDummy;");
            length+= sprintf(buffer + length, "libavcodec %s;", video_dec_ctx->codec->name);
            length+= sprintf(buffer + length, "%f", (float) rate.num / (float) rate.den);
            
            if (pkt.duration > 0) // discards invalid endframes
            {
                printf("%s\n",buffer);
            }
        }
    }

    return decoded;
}
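The example above converts pkt.dts to seconds by multiplying with time_base.num/den by hand; av_q2d() expresses the same conversion, and splitting the result into hours, minutes and seconds needs a modulo on the minutes field. A small illustrative sketch (print_hms is not part of any of the projects shown here):

#include <stdio.h>
#include <stdint.h>
#include <libavutil/rational.h>

/* Print a stream timestamp as HH:MM:SS. */
static void print_hms(int64_t ts, AVRational time_base)
{
    int t = (int)(ts * av_q2d(time_base));              /* stream ticks -> whole seconds */
    printf("%02d:%02d:%02d\n", t / 3600, (t / 60) % 60, t % 60);
}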
Example #7
File: demux.c Project: mstorsjo/vlc
int avformat_OpenDemux( vlc_object_t *p_this )
{
    demux_t       *p_demux = (demux_t*)p_this;
    demux_sys_t   *p_sys;
    AVInputFormat *fmt = NULL;
    vlc_tick_t    i_start_time = VLC_TICK_INVALID;
    bool          b_can_seek;
    const char    *psz_url;
    int           error;

    if( p_demux->psz_filepath )
        psz_url = p_demux->psz_filepath;
    else
        psz_url = p_demux->psz_url;

    if( avformat_ProbeDemux( p_this, &fmt, psz_url ) != VLC_SUCCESS )
        return VLC_EGENERIC;

    vlc_stream_Control( p_demux->s, STREAM_CAN_SEEK, &b_can_seek );

    /* Fill p_demux fields */
    p_demux->pf_demux = Demux;
    p_demux->pf_control = Control;
    p_demux->p_sys = p_sys = malloc( sizeof( demux_sys_t ) );
    if( !p_sys )
        return VLC_ENOMEM;

    p_sys->ic = 0;
    p_sys->fmt = fmt;
    p_sys->tracks = NULL;
    p_sys->i_ssa_order = 0;
    TAB_INIT( p_sys->i_attachments, p_sys->attachments);
    p_sys->p_title = NULL;
    p_sys->i_seekpoint = 0;
    p_sys->i_update = 0;

    /* Create I/O wrapper */
    unsigned char * p_io_buffer = av_malloc( AVFORMAT_IOBUFFER_SIZE );
    if( !p_io_buffer )
    {
        avformat_CloseDemux( p_this );
        return VLC_ENOMEM;
    }

    p_sys->ic = avformat_alloc_context();
    if( !p_sys->ic )
    {
        av_free( p_io_buffer );
        avformat_CloseDemux( p_this );
        return VLC_ENOMEM;
    }

    AVIOContext *pb = p_sys->ic->pb = avio_alloc_context( p_io_buffer,
        AVFORMAT_IOBUFFER_SIZE, 0, p_demux, IORead, NULL, IOSeek );
    if( !pb )
    {
        av_free( p_io_buffer );
        avformat_CloseDemux( p_this );
        return VLC_ENOMEM;
    }

    p_sys->ic->pb->seekable = b_can_seek ? AVIO_SEEKABLE_NORMAL : 0;
    error = avformat_open_input(&p_sys->ic, psz_url, p_sys->fmt, NULL);

    if( error < 0 )
    {
        msg_Err( p_demux, "Could not open %s: %s", psz_url,
                 vlc_strerror_c(AVUNERROR(error)) );
        av_free( pb->buffer );
        av_free( pb );
        p_sys->ic = NULL;
        avformat_CloseDemux( p_this );
        return VLC_EGENERIC;
    }

    char *psz_opts = var_InheritString( p_demux, "avformat-options" );
    unsigned nb_streams = p_sys->ic->nb_streams;

    AVDictionary *options[nb_streams ? nb_streams : 1];
    options[0] = NULL;
    for (unsigned i = 1; i < nb_streams; i++)
        options[i] = NULL;
    if (psz_opts) {
        vlc_av_get_options(psz_opts, &options[0]);
        for (unsigned i = 1; i < nb_streams; i++) {
            av_dict_copy(&options[i], options[0], 0);
        }
        free(psz_opts);
    }
    vlc_avcodec_lock(); /* avformat calls avcodec behind our back!!! */
    error = avformat_find_stream_info( p_sys->ic, options );
    vlc_avcodec_unlock();
    AVDictionaryEntry *t = NULL;
    while ((t = av_dict_get(options[0], "", t, AV_DICT_IGNORE_SUFFIX))) {
        msg_Err( p_demux, "Unknown option \"%s\"", t->key );
    }
    av_dict_free(&options[0]);
    for (unsigned i = 1; i < nb_streams; i++) {
        av_dict_free(&options[i]);
    }

    nb_streams = p_sys->ic->nb_streams; /* it may have changed */
    if( !nb_streams )
    {
        msg_Err( p_demux, "No streams found");
        avformat_CloseDemux( p_this );
        return VLC_EGENERIC;
    }
    p_sys->tracks = calloc( nb_streams, sizeof(*p_sys->tracks) );
    if( !p_sys->tracks )
    {
        avformat_CloseDemux( p_this );
        return VLC_ENOMEM;
    }
    p_sys->i_tracks = nb_streams;

    if( error < 0 )
    {
        msg_Warn( p_demux, "Could not find stream info: %s",
                  vlc_strerror_c(AVUNERROR(error)) );
    }

    for( unsigned i = 0; i < nb_streams; i++ )
    {
        struct avformat_track_s *p_track = &p_sys->tracks[i];
        AVStream *s = p_sys->ic->streams[i];
        const AVCodecParameters *cp = s->codecpar;
        es_format_t es_fmt;
        const char *psz_type = "unknown";

        /* Do not use the cover art as a stream */
        if( s->disposition == AV_DISPOSITION_ATTACHED_PIC )
            continue;

        vlc_fourcc_t fcc = GetVlcFourcc( cp->codec_id );
        switch( cp->codec_type )
        {
        case AVMEDIA_TYPE_AUDIO:
            es_format_Init( &es_fmt, AUDIO_ES, fcc );
            es_fmt.i_original_fourcc = CodecTagToFourcc( cp->codec_tag );
            es_fmt.i_bitrate = cp->bit_rate;
            es_fmt.audio.i_channels = cp->channels;
            es_fmt.audio.i_rate = cp->sample_rate;
            es_fmt.audio.i_bitspersample = cp->bits_per_coded_sample;
            es_fmt.audio.i_blockalign = cp->block_align;
            psz_type = "audio";

            if(cp->codec_id == AV_CODEC_ID_AAC_LATM)
            {
                es_fmt.i_original_fourcc = VLC_FOURCC('L','A','T','M');
                es_fmt.b_packetized = false;
            }
            else if(cp->codec_id == AV_CODEC_ID_AAC && p_sys->fmt->long_name &&
                    strstr(p_sys->fmt->long_name, "raw ADTS AAC"))
            {
                es_fmt.i_original_fourcc = VLC_FOURCC('A','D','T','S');
                es_fmt.b_packetized = false;
            }
            break;

        case AVMEDIA_TYPE_VIDEO:
            es_format_Init( &es_fmt, VIDEO_ES, fcc );
            es_fmt.i_original_fourcc = CodecTagToFourcc( cp->codec_tag );

            es_fmt.video.i_bits_per_pixel = cp->bits_per_coded_sample;
            /* Special case for raw video data */
            if( cp->codec_id == AV_CODEC_ID_RAWVIDEO )
            {
                msg_Dbg( p_demux, "raw video, pixel format: %i", cp->format );
                if( GetVlcChroma( &es_fmt.video, cp->format ) != VLC_SUCCESS)
                {
                    msg_Err( p_demux, "was unable to find a FourCC match for raw video" );
                }
                else
                    es_fmt.i_codec = es_fmt.video.i_chroma;
            }
            /* We need this for the h264 packetizer */
            else if( cp->codec_id == AV_CODEC_ID_H264 && ( p_sys->fmt == av_find_input_format("flv") ||
                p_sys->fmt == av_find_input_format("matroska") || p_sys->fmt == av_find_input_format("mp4") ) )
                es_fmt.i_original_fourcc = VLC_FOURCC( 'a', 'v', 'c', '1' );

            es_fmt.video.i_width = cp->width;
            es_fmt.video.i_height = cp->height;
            es_fmt.video.i_visible_width = es_fmt.video.i_width;
            es_fmt.video.i_visible_height = es_fmt.video.i_height;

            get_rotation(&es_fmt, s);

# warning FIXME: implement palette transmission
            psz_type = "video";

            AVRational rate;
#if (LIBAVUTIL_VERSION_MICRO < 100) /* libav */
# if (LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(55, 20, 0))
            rate.num = s->time_base.num;
            rate.den = s->time_base.den;
# else
            rate.num = s->codec->time_base.num;
            rate.den = s->codec->time_base.den;
# endif
            rate.den *= __MAX( s->codec->ticks_per_frame, 1 );
#else /* ffmpeg */
            rate = av_guess_frame_rate( p_sys->ic, s, NULL );
#endif
            if( rate.den && rate.num )
            {
                es_fmt.video.i_frame_rate = rate.num;
                es_fmt.video.i_frame_rate_base = rate.den;
            }

            AVRational ar;
#if (LIBAVUTIL_VERSION_MICRO < 100) /* libav */
            ar.num = s->sample_aspect_ratio.num;
            ar.den = s->sample_aspect_ratio.den;
#else
            ar = av_guess_sample_aspect_ratio( p_sys->ic, s, NULL );
#endif
            if( ar.num && ar.den )
            {
                es_fmt.video.i_sar_den = ar.den;
                es_fmt.video.i_sar_num = ar.num;
            }
            break;

        case AVMEDIA_TYPE_SUBTITLE:
            es_format_Init( &es_fmt, SPU_ES, fcc );
            es_fmt.i_original_fourcc = CodecTagToFourcc( cp->codec_tag );
            if( strncmp( p_sys->ic->iformat->name, "matroska", 8 ) == 0 &&
                cp->codec_id == AV_CODEC_ID_DVD_SUBTITLE &&
                cp->extradata != NULL &&
                cp->extradata_size > 0 )
            {
                char *psz_start;
                char *psz_buf = malloc( cp->extradata_size + 1);
                if( psz_buf != NULL )
                {
                    memcpy( psz_buf, cp->extradata , cp->extradata_size );
                    psz_buf[cp->extradata_size] = '\0';

                    psz_start = strstr( psz_buf, "size:" );
                    if( psz_start &&
                        vobsub_size_parse( psz_start,
                                           &es_fmt.subs.spu.i_original_frame_width,
                                           &es_fmt.subs.spu.i_original_frame_height ) == VLC_SUCCESS )
                    {
                        msg_Dbg( p_demux, "original frame size: %dx%d",
                                 es_fmt.subs.spu.i_original_frame_width,
                                 es_fmt.subs.spu.i_original_frame_height );
                    }
                    else
                    {
                        msg_Warn( p_demux, "reading original frame size failed" );
                    }

                    psz_start = strstr( psz_buf, "palette:" );
                    if( psz_start &&
                        vobsub_palette_parse( psz_start, &es_fmt.subs.spu.palette[1] ) == VLC_SUCCESS )
                    {
                        es_fmt.subs.spu.palette[0] = SPU_PALETTE_DEFINED;
                        msg_Dbg( p_demux, "vobsub palette read" );
                    }
                    else
                    {
                        msg_Warn( p_demux, "reading original palette failed" );
                    }
                    free( psz_buf );
                }
            }
            else if( cp->codec_id == AV_CODEC_ID_DVB_SUBTITLE &&
                     cp->extradata_size > 3 )
            {
                es_fmt.subs.dvb.i_id = GetWBE( cp->extradata ) |
                                      (GetWBE( cp->extradata + 2 ) << 16);
            }
            else if( cp->codec_id == AV_CODEC_ID_MOV_TEXT )
            {
                if( cp->extradata_size && (es_fmt.p_extra = malloc(cp->extradata_size)) )
                {
                    memcpy( es_fmt.p_extra, cp->extradata, cp->extradata_size );
                    es_fmt.i_extra = cp->extradata_size;
                }
            }
            psz_type = "subtitle";
            break;

        default:
            es_format_Init( &es_fmt, UNKNOWN_ES, 0 );
            es_fmt.i_original_fourcc = CodecTagToFourcc( cp->codec_tag );
#ifdef HAVE_AVUTIL_CODEC_ATTACHMENT
            if( cp->codec_type == AVMEDIA_TYPE_ATTACHMENT )
            {
                input_attachment_t *p_attachment;

                psz_type = "attachment";
                if( cp->codec_id == AV_CODEC_ID_TTF )
                {
                    AVDictionaryEntry *filename = av_dict_get( s->metadata, "filename", NULL, 0 );
                    if( filename && filename->value )
                    {
                        p_attachment = vlc_input_attachment_New(
                                filename->value, "application/x-truetype-font",
                                NULL, cp->extradata, (int)cp->extradata_size );
                        if( p_attachment )
                            TAB_APPEND( p_sys->i_attachments, p_sys->attachments,
                                        p_attachment );
                    }
                }
                else msg_Warn( p_demux, "unsupported attachment type (%u) in avformat demux", cp->codec_id );
            }
            else
#endif
            {
                if( cp->codec_type == AVMEDIA_TYPE_DATA )
                    psz_type = "data";

                msg_Warn( p_demux, "unsupported track type (%u:%u) in avformat demux", cp->codec_type, cp->codec_id );
            }
            break;
        }

        AVDictionaryEntry *language = av_dict_get( s->metadata, "language", NULL, 0 );
        if ( language && language->value )
            es_fmt.psz_language = strdup( language->value );

        if( s->disposition & AV_DISPOSITION_DEFAULT )
            es_fmt.i_priority = ES_PRIORITY_SELECTABLE_MIN + 1000;

#ifdef HAVE_AVUTIL_CODEC_ATTACHMENT
        if( cp->codec_type != AVMEDIA_TYPE_ATTACHMENT )
#endif
        if( cp->codec_type != AVMEDIA_TYPE_DATA )
        {
            const bool    b_ogg = !strcmp( p_sys->fmt->name, "ogg" );
            const uint8_t *p_extra = cp->extradata;
            unsigned      i_extra  = cp->extradata_size;

            if( cp->codec_id == AV_CODEC_ID_THEORA && b_ogg )
            {
                unsigned pi_size[3];
                const void *pp_data[3];
                unsigned i_count;
                for( i_count = 0; i_count < 3; i_count++ )
                {
                    if( i_extra < 2 )
                        break;
                    pi_size[i_count] = GetWBE( p_extra );
                    pp_data[i_count] = &p_extra[2];
                    if( i_extra < pi_size[i_count] + 2 )
                        break;

                    p_extra += 2 + pi_size[i_count];
                    i_extra -= 2 + pi_size[i_count];
                }
                if( i_count > 0 && xiph_PackHeaders( &es_fmt.i_extra, &es_fmt.p_extra,
                                                     pi_size, pp_data, i_count ) )
                {
                    es_fmt.i_extra = 0;
                    es_fmt.p_extra = NULL;
                }
            }
            else if( cp->codec_id == AV_CODEC_ID_SPEEX && b_ogg )
            {
                const uint8_t p_dummy_comment[] = {
                    0, 0, 0, 0,
                    0, 0, 0, 0,
                };
                unsigned pi_size[2];
                const void *pp_data[2];

                pi_size[0] = i_extra;
                pp_data[0] = p_extra;

                pi_size[1] = sizeof(p_dummy_comment);
                pp_data[1] = p_dummy_comment;

                if( pi_size[0] > 0 && xiph_PackHeaders( &es_fmt.i_extra, &es_fmt.p_extra,
                                                        pi_size, pp_data, 2 ) )
                {
                    es_fmt.i_extra = 0;
                    es_fmt.p_extra = NULL;
                }
            }
            else if( cp->codec_id == AV_CODEC_ID_OPUS )
            {
                const uint8_t p_dummy_comment[] = {
                    'O', 'p', 'u', 's',
                    'T', 'a', 'g', 's',
                    0, 0, 0, 0, /* Vendor String length */
                                /* Vendor String */
                    0, 0, 0, 0, /* User Comment List Length */

                };
                unsigned pi_size[2];
                const void *pp_data[2];

                pi_size[0] = i_extra;
                pp_data[0] = p_extra;

                pi_size[1] = sizeof(p_dummy_comment);
                pp_data[1] = p_dummy_comment;

                if( pi_size[0] > 0 && xiph_PackHeaders( &es_fmt.i_extra, &es_fmt.p_extra,
                                                        pi_size, pp_data, 2 ) )
                {
                    es_fmt.i_extra = 0;
                    es_fmt.p_extra = NULL;
                }
            }
            else if( cp->extradata_size > 0 && !es_fmt.i_extra )
            {
                es_fmt.p_extra = malloc( i_extra );
                if( es_fmt.p_extra )
                {
                    es_fmt.i_extra = i_extra;
                    memcpy( es_fmt.p_extra, p_extra, i_extra );
                }
            }

            p_track->p_es = es_out_Add( p_demux->out, &es_fmt );
            if( p_track->p_es && (s->disposition & AV_DISPOSITION_DEFAULT) )
                es_out_Control( p_demux->out, ES_OUT_SET_ES_DEFAULT, p_track->p_es );

            msg_Dbg( p_demux, "adding es: %s codec = %4.4s (%d)",
                     psz_type, (char*)&fcc, cp->codec_id  );
        }
        es_format_Clean( &es_fmt );
    }

    if( p_sys->ic->start_time != (int64_t)AV_NOPTS_VALUE )
        i_start_time = FROM_AV_TS(p_sys->ic->start_time);

    msg_Dbg( p_demux, "AVFormat(%s %s) supported stream", AVPROVIDER(LIBAVFORMAT), LIBAVFORMAT_IDENT );
    msg_Dbg( p_demux, "    - format = %s (%s)",
             p_sys->fmt->name, p_sys->fmt->long_name );
    msg_Dbg( p_demux, "    - start time = %"PRId64, i_start_time );
    msg_Dbg( p_demux, "    - duration = %"PRId64,
             ( p_sys->ic->duration != (int64_t)AV_NOPTS_VALUE ) ?
             FROM_AV_TS(p_sys->ic->duration) : -1 );

    if( p_sys->ic->nb_chapters > 0 )
    {
        p_sys->p_title = vlc_input_title_New();
        p_sys->p_title->i_length = FROM_AV_TS(p_sys->ic->duration);
    }

    for( unsigned i = 0; i < p_sys->ic->nb_chapters; i++ )
    {
        seekpoint_t *s = vlc_seekpoint_New();

        AVDictionaryEntry *title = av_dict_get( p_sys->ic->chapters[i]->metadata, "title", NULL, 0);
        if( title && title->value )
        {
            s->psz_name = strdup( title->value );
            EnsureUTF8( s->psz_name );
            msg_Dbg( p_demux, "    - chapter %d: %s", i, s->psz_name );
        }
        s->i_time_offset = vlc_tick_from_samples( p_sys->ic->chapters[i]->start *
            p_sys->ic->chapters[i]->time_base.num,
            p_sys->ic->chapters[i]->time_base.den ) -
            (i_start_time != VLC_TICK_INVALID ? i_start_time : 0 );
        TAB_APPEND( p_sys->p_title->i_seekpoint, p_sys->p_title->seekpoint, s );
    }

    ResetTime( p_demux, 0 );
    return VLC_SUCCESS;
}
static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv                *env      = NULL;
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    FFPlayer              *ffp      = opaque->ffp;
    VideoState            *is       = ffp->is;
    Decoder               *d        = &is->viddec;
    PacketQueue           *q        = d->queue;
    int                    ret      = 0;
    int                    dequeue_count = 0;
    AVFrame               *frame    = NULL;
    int                    got_frame = 0;
    AVRational             tb         = is->video_st->time_base;
    AVRational             frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double                 duration;
    double                 pts;

    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    frame = av_frame_alloc();
    if (!frame)
        goto fail;

    if (opaque->frame_rotate_degrees == 90 || opaque->frame_rotate_degrees == 270) {
        opaque->frame_width  = opaque->avctx->height;
        opaque->frame_height = opaque->avctx->width;
    } else {
        opaque->frame_width  = opaque->avctx->width;
        opaque->frame_height = opaque->avctx->height;
    }

    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    while (!q->abort_request) {
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        got_frame = 0;
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
        if (opaque->acodec_first_dequeue_output_request) {
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        if (ret != 0) {
            ret = -1;
            if (got_frame && frame->opaque)
                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            goto fail;
        }
        if (got_frame) {
            duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
            if (ret) {
                if (frame->opaque)
                    SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            av_frame_unref(frame);
        }
    }

fail:
    av_frame_free(&frame);
    SDL_AMediaCodecFake_abort(opaque->acodec);
    if (opaque->n_buf_out) {
        free(opaque->amc_buf_out);
        opaque->n_buf_out = 0;
        opaque->amc_buf_out = NULL;
        opaque->off_buf_out = 0;
        opaque->last_queued_pts = AV_NOPTS_VALUE;
    }
    if (opaque->acodec) {
        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
        SDL_LockMutex(opaque->acodec_mutex);
        SDL_AMediaCodec_stop(opaque->acodec);
        SDL_UnlockMutex(opaque->acodec_mutex);
    }
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}
Example #9
static void
prepare (GeglOperation *operation)
{
  GeglProperties *o = GEGL_PROPERTIES (operation);
  Priv       *p = (Priv*)o->user_data;

  if (p == NULL)
    init (o);
  p = (Priv*)o->user_data;

  g_assert (o->user_data != NULL);

  gegl_operation_set_format (operation, "output", babl_format ("R'G'B' u8"));

  if (!p->loadedfilename ||
      strcmp (p->loadedfilename, o->path) ||
       p->prevframe > o->frame  /* a bit heavy handed, but improves consistency */
      )
    {
      gint i;
      gint err;

      ff_cleanup (o);
      err = avformat_open_input(&p->video_fcontext, o->path, NULL, 0);
      if (err < 0)
        {
          print_error (o->path, err);
        }
      err = avformat_find_stream_info (p->video_fcontext, NULL);
      if (err < 0)
        {
          g_warning ("ff-load: error finding stream info for %s", o->path);
          return;
        }
      err = avformat_open_input(&p->audio_fcontext, o->path, NULL, 0);
      if (err < 0)
        {
          print_error (o->path, err);
        }
      err = avformat_find_stream_info (p->audio_fcontext, NULL);
      if (err < 0)
        {
          g_warning ("ff-load: error finding stream info for %s", o->path);
          return;
        }

      for (i = 0; i< p->video_fcontext->nb_streams; i++)
        {
          AVCodecContext *c = p->video_fcontext->streams[i]->codec;
          if (c->codec_type == AVMEDIA_TYPE_VIDEO)
            {
              p->video_stream = p->video_fcontext->streams[i];
              p->video_index = i;
            }
          if (c->codec_type == AVMEDIA_TYPE_AUDIO)
            {
              p->audio_stream = p->audio_fcontext->streams[i];
              p->audio_index = i;
            }
        }

      p->video_codec = avcodec_find_decoder (p->video_stream->codec->codec_id);

      if (p->audio_stream)
        {
	  p->audio_codec = avcodec_find_decoder (p->audio_stream->codec->codec_id);
	  if (p->audio_codec == NULL)
            g_warning ("audio codec not found");
          else 
	    if (avcodec_open2 (p->audio_stream->codec, p->audio_codec, NULL) < 0)
              {
                 g_warning ("error opening codec %s", p->audio_stream->codec->codec->name);
              }
            else
              {
                 o->audio_sample_rate = p->audio_stream->codec->sample_rate;
                 o->audio_channels = MIN(p->audio_stream->codec->channels, GEGL_MAX_AUDIO_CHANNELS);
              }
        }

      p->video_stream->codec->err_recognition = AV_EF_IGNORE_ERR |
                                                AV_EF_BITSTREAM |
                                                AV_EF_BUFFER;
      p->video_stream->codec->workaround_bugs = FF_BUG_AUTODETECT;

      if (p->video_codec == NULL)
          g_warning ("video codec not found");

      if (avcodec_open2 (p->video_stream->codec, p->video_codec, NULL) < 0)
        {
          g_warning ("error opening codec %s", p->video_stream->codec->codec->name);
          return;
        }

      p->width = p->video_stream->codec->width;
      p->height = p->video_stream->codec->height;
      p->lavc_frame = av_frame_alloc ();

      if (o->video_codec)
        g_free (o->video_codec);
      if (p->video_codec->name)
        o->video_codec = g_strdup (p->video_codec->name);
      else
        o->video_codec = g_strdup ("");

      if (o->audio_codec)
        g_free (o->audio_codec);
      if (p->audio_codec && p->audio_codec->name)
        o->audio_codec = g_strdup (p->audio_codec->name);
      else
        o->audio_codec = g_strdup ("");

      if (p->loadedfilename)
        g_free (p->loadedfilename);
      p->loadedfilename = g_strdup (o->path);
      p->prevframe = -1;
      p->a_prevframe = -1;

      o->frames = p->video_stream->nb_frames;
      o->frame_rate = av_q2d (av_guess_frame_rate (p->video_fcontext, p->video_stream, NULL));
      if (!o->frames)
      {
        /* this is a guesstimate of frame-count */
	o->frames = p->video_fcontext->duration * o->frame_rate / AV_TIME_BASE;
        /* make second guess for things like luxo */
	if (o->frames < 1)
          o->frames = 23;
      }
#if 0
      {
        int m ,h;
        int s = o->frames / o->frame_rate;
        m = s / 60;
        s -= m * 60;
        h = m / 60;
        m -= h * 60;
        fprintf (stdout, "duration: %02i:%02i:%02i\n", h, m, s);
      }
#endif

    p->codec_delay = p->video_stream->codec->delay;
  
    if (!strcmp (o->video_codec, "mpeg1video"))
      p->codec_delay = 1;
    else if (!strcmp (o->video_codec, "h264"))
    {
      if (strstr (p->video_fcontext->filename, ".mp4") ||
          strstr (p->video_fcontext->filename, ".MP4"))  /* XXX: too hacky, isn't there an avformat thing to use?,
 or perhaps we can measure this when decoding the first frame.
 */
        p->codec_delay = 3;
      else
        p->codec_delay = 0;
    }

    clear_audio_track (o);
  }
}
Example #10
static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
                                        AVFilterInOut *in)
{
    AVFilterContext *last_filter;
    const AVFilter *buffer_filt = avfilter_get_by_name("buffer");
    InputStream *ist = ifilter->ist;
    InputFile     *f = input_files[ist->file_index];
    AVRational tb = ist->framerate.num ? av_inv_q(ist->framerate) :
                                         ist->st->time_base;
    AVRational fr = ist->framerate;
    AVRational sar;
    AVBPrint args;
    char name[255];
    int ret, pad_idx = 0;
    int64_t tsoffset = 0;

    if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_AUDIO) {
        av_log(NULL, AV_LOG_ERROR, "Cannot connect video filter to audio input\n");
        return AVERROR(EINVAL);
    }

    if (!fr.num)
        fr = av_guess_frame_rate(input_files[ist->file_index]->ctx, ist->st, NULL);

    if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) {
        ret = sub2video_prepare(ist);
        if (ret < 0)
            return ret;
    }

    sar = ist->st->sample_aspect_ratio.num ?
          ist->st->sample_aspect_ratio :
          ist->dec_ctx->sample_aspect_ratio;
    if(!sar.den)
        sar = (AVRational){0,1};
    av_bprint_init(&args, 0, 1);
    av_bprintf(&args,
             "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:"
             "pixel_aspect=%d/%d:sws_param=flags=%d", ist->resample_width,
             ist->resample_height,
             ist->hwaccel_retrieve_data ? ist->hwaccel_retrieved_pix_fmt : ist->resample_pix_fmt,
             tb.num, tb.den, sar.num, sar.den,
             SWS_BILINEAR + ((ist->dec_ctx->flags&AV_CODEC_FLAG_BITEXACT) ? SWS_BITEXACT:0));
    if (fr.num && fr.den)
        av_bprintf(&args, ":frame_rate=%d/%d", fr.num, fr.den);
    snprintf(name, sizeof(name), "graph %d input from stream %d:%d", fg->index,
             ist->file_index, ist->st->index);

    if ((ret = avfilter_graph_create_filter(&ifilter->filter, buffer_filt, name,
                                            args.str, NULL, fg->graph)) < 0)
        return ret;
    last_filter = ifilter->filter;

    if (ist->autorotate) {
        double theta = get_rotation(ist->st);

        if (fabs(theta - 90) < 1.0) {
            ret = insert_filter(&last_filter, &pad_idx, "transpose", "clock");
        } else if (fabs(theta - 180) < 1.0) {
            ret = insert_filter(&last_filter, &pad_idx, "hflip", NULL);
            if (ret < 0)
                return ret;
            ret = insert_filter(&last_filter, &pad_idx, "vflip", NULL);
        } else if (fabs(theta - 270) < 1.0) {
            ret = insert_filter(&last_filter, &pad_idx, "transpose", "cclock");
        } else if (fabs(theta) > 1.0) {
            char rotate_buf[64];
            snprintf(rotate_buf, sizeof(rotate_buf), "%f*PI/180", theta);
            ret = insert_filter(&last_filter, &pad_idx, "rotate", rotate_buf);
        }
        if (ret < 0)
            return ret;
    }

    if (ist->framerate.num) {
        AVFilterContext *setpts;

        snprintf(name, sizeof(name), "force CFR for input from stream %d:%d",
                 ist->file_index, ist->st->index);
        if ((ret = avfilter_graph_create_filter(&setpts,
                                                avfilter_get_by_name("setpts"),
                                                name, "N", NULL,
                                                fg->graph)) < 0)
            return ret;

        if ((ret = avfilter_link(last_filter, 0, setpts, 0)) < 0)
            return ret;

        last_filter = setpts;
    }

    if (do_deinterlace) {
        AVFilterContext *yadif;

        snprintf(name, sizeof(name), "deinterlace input from stream %d:%d",
                 ist->file_index, ist->st->index);
        if ((ret = avfilter_graph_create_filter(&yadif,
                                                avfilter_get_by_name("yadif"),
                                                name, "", NULL,
                                                fg->graph)) < 0)
            return ret;

        if ((ret = avfilter_link(last_filter, 0, yadif, 0)) < 0)
            return ret;

        last_filter = yadif;
    }

    snprintf(name, sizeof(name), "trim for input stream %d:%d",
             ist->file_index, ist->st->index);
    if (copy_ts) {
        tsoffset = f->start_time == AV_NOPTS_VALUE ? 0 : f->start_time;
        if (!start_at_zero && f->ctx->start_time != AV_NOPTS_VALUE)
            tsoffset += f->ctx->start_time;
    }
    ret = insert_trim(((f->start_time == AV_NOPTS_VALUE) || !f->accurate_seek) ?
                      AV_NOPTS_VALUE : tsoffset, f->recording_time,
                      &last_filter, &pad_idx, name);
    if (ret < 0)
        return ret;

    if ((ret = avfilter_link(last_filter, 0, in->filter_ctx, in->pad_idx)) < 0)
        return ret;
    return 0;
}
Example #11
	float getFrameRate(){
		AVRational avFps = av_guess_frame_rate(pFormatCtx, pFormatCtx->streams[videoStream], NULL);
		return avFps.num != 0 && avFps.den != 0 ? (float)av_q2d(avFps) : 30.0f;
	}
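For reference, a hedged, self-contained sketch that ties the calls above together: open an input, locate the best video stream, and print the guessed frame rate. The file handling and output format are illustrative, and it assumes a recent FFmpeg where av_register_all() is no longer needed.

#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavutil/rational.h>

int main(int argc, char **argv)
{
    if (argc < 2) {
        fprintf(stderr, "usage: %s <media file>\n", argv[0]);
        return 1;
    }

    AVFormatContext *ic = NULL;
    if (avformat_open_input(&ic, argv[1], NULL, NULL) < 0) {
        fprintf(stderr, "failed to open %s\n", argv[1]);
        return 1;
    }
    if (avformat_find_stream_info(ic, NULL) < 0)
        fprintf(stderr, "warning: could not read stream info\n");

    int vid = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (vid >= 0) {
        AVRational fr = av_guess_frame_rate(ic, ic->streams[vid], NULL);
        printf("guessed frame rate: %d/%d (%.3f fps)\n",
               fr.num, fr.den, (fr.num && fr.den) ? av_q2d(fr) : 0.0);
    } else {
        fprintf(stderr, "no video stream found\n");
    }

    avformat_close_input(&ic);
    return 0;
}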