Example #1
File: demo_YUV.c  Project: taoanran/ffmpeg
int main(int argc, char **argv)
{
    AVFormatContext* pCtx = 0;
    AVCodecContext *pCodecCtx = 0;
    AVCodec *pCodec = 0;
    AVPacket packet;
    AVFrame *pFrame = 0;
    FILE *fpo1 = NULL;
    FILE *fpo2 = NULL;
    int nframe;
    int err;
    int got_picture = -1;
    int picwidth, picheight, linesize;
    unsigned char *pBuf;
    int i;
    int64_t timestamp;
    struct options opt;
    int usefo = 0;
    struct audio_dsp dsp;
    int dusecs;
    float usecs1 = 0;
    float usecs2 = 0;
    struct timeval elapsed1, elapsed2;
    int decoded = 0;

	//taoanran add +++++++++
	int ret = -1;
	int videoStream = -1; //video streamID
	// ----------------------

	int flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
#if 0
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        fprintf(stderr, "(Did you set the DISPLAY variable?)\n");
        exit(1);
    }
#endif
    av_register_all();

    av_log_set_callback(log_callback);
    av_log_set_level(50);

    if (Myparse_option(&opt, argc, argv) < 0 || (strlen(opt.finput) == 0))
    {
        Myshow_help(argv[0]);
        return 0;
    }

    err = avformat_open_input(&pCtx, opt.finput, 0, 0);
    if (err < 0)
    {
        printf("\n->(avformat_open_input)\tERROR:\t%d\n", err);
        goto fail;
    }
	printf("=========================\n");
    err = avformat_find_stream_info(pCtx, 0);

    if (err < 0)
    {
        printf("\n->(avformat_find_stream_info)\tERROR:\t%d\n", err);
        goto fail;
    }
	av_dump_format(pCtx, 0, opt.finput, 0);

	// check the video stream
	videoStream = find_video_stream(pCtx);
	if (videoStream < 0)
	{
		printf("there is not audio stream !!!!!!! \n");
		return -1;
	}

	pCodecCtx = pCtx->streams[videoStream]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);//find the video decoder
 	if (!pCodec)
    {
        printf("\ncan't find the audio decoder!\n");
        goto fail;
    }

	pFrame = avcodec_alloc_frame();

	// open the video decoder
	ret = avcodec_open2(pCodecCtx, pCodec, 0);

	if (ret < 0)
	{
		printf("avcodec_open2 error \n");
		return -1;
	}

#if 0
	//only for audio
	pFrame->nb_samples = pCodecCtx->frame_size;
	pFrame->format = pCodecCtx->sample_fmt;
	pFrame->channel_layout = pCodecCtx->channel_layout;
#endif
#if 0
	//set the param of SDL
	SDL_AudioSpec wanted_spec, spec; 
	wanted_spec.freq = pCodecCtx->sample_rate;  
	wanted_spec.format = AUDIO_S16SYS;  
	wanted_spec.channels = pCodecCtx->channels;  
	wanted_spec.silence = 0;  
	wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;  
	wanted_spec.callback = audio_callback;
	wanted_spec.userdata = pCodecCtx;
	if (SDL_OpenAudio(&wanted_spec, &spec) < 0)
    {
        fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
        return -1;  
    } 
#endif			

	 printf(" bit_rate = %d \r\n", pCodecCtx->bit_rate);
     printf(" sample_rate = %d \r\n", pCodecCtx->sample_rate);
     printf(" channels = %d \r\n", pCodecCtx->channels);
     printf(" code_name = %s \r\n", pCodecCtx->codec->name);

	while (av_read_frame(pCtx, &packet) >= 0)
	{
		// found a video packet
		if (packet.stream_index == videoStream)
		{
			avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);
			if (got_picture)
			{
				printf("pFrame->width = %d\n", pFrame->width);
				printf("pFrame->height = %d\n", pFrame->height);
				printf("pFrame->linesize[0] = %d\n", pFrame->linesize[0]);
				printf("pFrame->linesize[1] = %d\n", pFrame->linesize[1]);
				printf("pFrame->linesize[2] = %d\n", pFrame->linesize[2]);

				// dump the YUV420P planes
				saveYUV420P(pFrame->data[0], pFrame->linesize[0], pCodecCtx->width, pCodecCtx->height);     //Y: 4
				saveYUV420P(pFrame->data[1], pFrame->linesize[1], pCodecCtx->width/2, pCodecCtx->height/2); //U: 1
				saveYUV420P(pFrame->data[2], pFrame->linesize[2], pCodecCtx->width/2, pCodecCtx->height/2); //V: 1
			}
		}
		av_free_packet(&packet);
	}
	
	avcodec_close(pCodecCtx);
	av_free(pFrame);
	avformat_close_input(&pCtx);
	return 0;
#if 0	
	if (!opt.nodec)
    {
        
        pCodecCtx = pCtx->streams[opt.streamId]->codec;
 
        if (opt.thread_count <= 16 && opt.thread_count > 0 )
        {
            pCodecCtx->thread_count = opt.thread_count;
            pCodecCtx->thread_type = FF_THREAD_FRAME;
        }
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        if (!pCodec)
        {
            printf("\n->不能找到编解码器!\n");
            goto fail;
        }
        err = avcodec_open2(pCodecCtx, pCodec, 0);
        if (err < 0)
        {
            printf("\n->(avcodec_open)\tERROR:\t%d\n", err);
            goto fail;
        }
        pFrame = avcodec_alloc_frame();
 
        if (opt.bplay)
        {
            dsp.audio_fd = open(OSS_DEVICE, O_WRONLY);
            if (dsp.audio_fd == -1)
            {
                printf("\n-> 无法打开音频设备\n");
                goto fail;
            }
            dsp.channels = pCodecCtx->channels;
            dsp.speed = pCodecCtx->sample_rate;
            dsp.format = map_formats(pCodecCtx->sample_fmt);
            if (set_audio(&dsp) < 0)
            {
                printf("\n-> 不能设置音频设备\n");
                goto fail;
            }
        }
    }
    nframe = 0;
	printf("=========================444444\n");
    while(nframe < opt.frames || opt.frames == -1)
    {
        gettimeofday(&elapsed1, NULL);
        err = av_read_frame(pCtx, &packet);
        if (err < 0)
        {
            printf("\n->(av_read_frame)\tERROR:\t%d\n", err);
            break;
        }
        gettimeofday(&elapsed2, NULL);
        dusecs = (elapsed2.tv_sec - elapsed1.tv_sec)*1000000 + (elapsed2.tv_usec - elapsed1.tv_usec);
        usecs2 += dusecs;
        timestamp = av_rescale_q(packet.dts, pCtx->streams[packet.stream_index]->time_base, (AVRational){1, AV_TIME_BASE});
        printf("\nFrame No ] stream#%d\tsize mB, timestamp:%6lld, dts:%6lld, pts:%6lld, ", nframe++, packet.stream_index, packet.size,
               timestamp, packet.dts, packet.pts);
        if (packet.stream_index == opt.streamId)
        {
#if 0
            for (i = 0; i < 16; i++)
            {
                if (i == 0) printf("\n pktdata: ");
                printf("%2x ", packet.data[i]);
            }
            printf("\n");
#endif
            if (usefo)
            {
                fwrite(packet.data, packet.size, 1, fpo1);
                fflush(fpo1);
            }
            if (pCtx->streams[opt.streamId]->codec->codec_type == AVMEDIA_TYPE_VIDEO && !opt.nodec)
            {
                picheight = pCtx->streams[opt.streamId]->codec->height;
                picwidth = pCtx->streams[opt.streamId]->codec->width;
 
                gettimeofday(&elapsed1, NULL);
                avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);
                decoded++;
                gettimeofday(&elapsed2, NULL);
                dusecs = (elapsed2.tv_sec - elapsed1.tv_sec)*1000000 + (elapsed2.tv_usec - elapsed1.tv_usec);
                usecs1 += dusecs;
                if (got_picture)
                {
                    printf("[Video: type %d, ref %d, pts %lld, pkt_pts %lld, pkt_dts %lld]",
                            pFrame->pict_type, pFrame->reference, pFrame->pts, pFrame->pkt_pts, pFrame->pkt_dts);
 
                    if (pCtx->streams[opt.streamId]->codec->pix_fmt == PIX_FMT_YUV420P)
                    {
                        if (usefo)
                        {
                            linesize = pFrame->linesize[0];
                            pBuf = pFrame->data[0];
                            for (i = 0; i < picheight; i++)
                            {
                                fwrite(pBuf, picwidth, 1, fpo2);
                                pBuf += linesize;
                            }
                            linesize = pFrame->linesize[1];
                            pBuf = pFrame->data[1];
                            for (i = 0; i < picheight/2; i++)
                            {
                                fwrite(pBuf, picwidth/2, 1, fpo2);
                                pBuf += linesize;
                            }
                            linesize = pFrame->linesize[2];
                            pBuf = pFrame->data[2];
                            for (i = 0; i < picheight/2; i++)
                            {
                                fwrite(pBuf, picwidth/2, 1, fpo2);
                                pBuf += linesize;
                            }
                            fflush(fpo2);
                        }
 
                        if (opt.bplay)
                        {
                            
                        }
                    }
                }
                av_free_packet(&packet);
            }
            else if (pCtx->streams[opt.streamId]->codec->codec_type == AVMEDIA_TYPE_AUDIO && !opt.nodec)
            {
                int got;
                gettimeofday(&elapsed1, NULL);
                avcodec_decode_audio4(pCodecCtx, pFrame, &got, &packet);
                decoded++;
                gettimeofday(&elapsed2, NULL);
                dusecs = (elapsed2.tv_sec - elapsed1.tv_sec)*1000000 + (elapsed2.tv_usec - elapsed1.tv_usec);
                usecs1 += dusecs;
                if (got)
                {
                    printf("[Audio: %5dB raw data, decoding time: %d]", pFrame->linesize[0], dusecs);
                    if (usefo)
                    {
                        fwrite(pFrame->data[0], pFrame->linesize[0], 1, fpo2);
                        fflush(fpo2);
                    }
                    if (opt.bplay)
                    {
                        play_pcm(&dsp, pFrame->data[0], pFrame->linesize[0]);
                    }
                }
            }
        }
    }
    if (!opt.nodec && pCodecCtx)
    {
        avcodec_close(pCodecCtx);
    }
    printf("\n%d 帧解析, average %.2f us per frame\n", nframe, usecs2/nframe);
    printf("%d 帧解码,平均 %.2f 我们每帧\n", decoded, usecs1/decoded);

#endif

fail:
    if (pCtx)
    {
        avformat_close_input(&pCtx);
    }


    return 0;
}
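
Example #1 calls a saveYUV420P() helper that is not shown in the snippet. Below is a minimal sketch of what such a helper could look like, assuming it appends one plane (Y, U, or V) to a raw dump file while honoring the stride; the output path "out.yuv" and the static FILE handle are assumptions for illustration, not part of the original project.

/* Hypothetical sketch of the helper used above: write one plane of a
 * YUV420P frame to a raw dump file, honoring the linesize (stride),
 * which may be larger than the visible width. */
static void saveYUV420P(unsigned char *plane, int linesize, int width, int height)
{
    static FILE *fp = NULL;   /* "out.yuv" is an assumed output path */
    int row;

    if (!fp)
        fp = fopen("out.yuv", "wb");
    if (!fp)
        return;

    for (row = 0; row < height; row++)
        fwrite(plane + row * linesize, 1, width, fp);

    fflush(fp);
}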
Example #2
File: video.c  Project: phako/tn
struct VideoFile*
video_file_open(const char* filename)
{
    struct VideoFile* video_file = NULL;

    return_if(NULL == filename, NULL);

    video_file = (struct VideoFile*)malloc(sizeof(struct VideoFile));

    if (video_file)
    {
        memset(video_file, 0, sizeof(struct VideoFile));
        if (avformat_open_input(&(video_file->format_ctx), filename, NULL, NULL) == 0)
        {
            if (avformat_find_stream_info(video_file->format_ctx, NULL) >= 0)
            {   
                int idx;
                /* find video stream */
                idx = video_file->video_stream_idx = 
                    find_video_stream(video_file->format_ctx);

                if (idx >= 0)
                {
                    video_file->video_stream = 
                        video_file->format_ctx->streams[idx];
                    video_file->codec_ctx = 
                        video_file->format_ctx->streams[idx]->codec;

                    video_file->codec = create_codec(video_file->codec_ctx);
                    if (video_file->codec)
                    {
                        /* 
                         * create software scaler context for colorspace
                         * conversion
                         */
                        video_file->scale_ctx =
                            make_scale_context(video_file->codec_ctx);
                        if (video_file->scale_ctx)
                        {
                            video_file->width = 
                                video_file->codec_ctx->width;
                            video_file->height = 
                                video_file->codec_ctx->height;
                            video_file->frame = avcodec_alloc_frame();
                            if (video_file->frame)
                            {
                                video_file->frame_rgb = avcodec_alloc_frame();
                                if (video_file->frame_rgb)
                                {
                                    int bytes = avpicture_get_size(
                                            PIX_FMT_RGB24,
                                            video_file->width,
                                            video_file->height);
                                    video_file->rgb_buffer =
                                        (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
                                    if (video_file->rgb_buffer)
                                    {
                                        avpicture_fill(
                                                (AVPicture *)video_file->frame_rgb, 
                                                video_file->rgb_buffer, 
                                                PIX_FMT_RGB24,
                                                video_file->width, 
                                                video_file->height);
                                        return video_file;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        video_file_close(video_file);
        video_file = NULL;
    }

    return video_file;
}
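
A short usage sketch for Example #2, assuming the video_file_close() function referenced inside video_file_open() and the width/height fields shown above; "input.mp4" style file names are placeholders, and av_register_all() is assumed to have been called beforehand (as in Example #1).

/* Hypothetical caller for video_file_open(); the file name is supplied by
 * the caller and av_register_all() must already have been called. */
static int probe_dimensions(const char* filename)
{
    struct VideoFile* vf = video_file_open(filename);
    if (NULL == vf)
        return -1;

    printf("%s: %dx%d\n", filename, vf->width, vf->height);

    video_file_close(vf);   /* assumed to release the members allocated above */
    return 0;
}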