/*
 * Prepares an AVFormatContext for output.
 * Currently, the output format and codecs are hardcoded in this file.
 */
void Java_com_example_ffmpegtest_recorder_FFmpegWrapper_prepareAVFormatContext(JNIEnv *env, jobject obj, jstring jOutputPath){
    init();

    // Create AVRational that expects timestamps in microseconds
    videoSourceTimeBase = av_malloc(sizeof(AVRational));
    videoSourceTimeBase->num = 1;
    videoSourceTimeBase->den = 1000000;

    audioSourceTimeBase = av_malloc(sizeof(AVRational));
    audioSourceTimeBase->num = 1;
    audioSourceTimeBase->den = 1000000;

    outputPath = (*env)->GetStringUTFChars(env, jOutputPath, NULL);

    outputFormatContext = avFormatContextForOutputPath(outputPath, outputFormatName);
    LOGI("post avFormatContextForOutputPath");

    //  For copying AVFormatContext from sample file:
    /*
    AVFormatContext *inputFormatContext = avFormatContextForInputPath(sampleFilePath, outputFormatName);
    LOGI("post avFormatContextForInputPath");
    copyAVFormatContext(&outputFormatContext, &inputFormatContext);
    LOGI("post copyAVFormatContext");
    */

    // For manually crafting AVFormatContext
    addVideoStream(outputFormatContext);
    addAudioStream(outputFormatContext);
    av_opt_set_int(outputFormatContext->priv_data, "hls_time", hlsSegmentDurationSec, 0);

    int result = openFileForWriting(outputFormatContext, outputPath);
    if(result < 0){
        LOGE("openFileForWriting error: %d", result);
        return;
    }

    writeFileHeader(outputFormatContext);
}
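Since the context prepared above expects source timestamps in microseconds (both videoSourceTimeBase and audioSourceTimeBase are 1/1000000), each packet's timestamps have to be rescaled into the destination stream's time base before muxing. A minimal sketch of that step, assuming a hypothetical writePacket() helper and the globals set up above (outputFormatContext, videoSourceTimeBase):

// Hypothetical helper, not part of the wrapper above: rescale a microsecond
// timestamp into the stream's time base and hand the packet to the muxer.
int writePacket(AVPacket *packet, int streamIndex, int64_t ptsMicros){
    AVStream *stream = outputFormatContext->streams[streamIndex];

    packet->stream_index = streamIndex;
    // Convert from the 1/1000000 source time base to the stream time base.
    packet->pts = av_rescale_q(ptsMicros, *videoSourceTimeBase, stream->time_base);
    packet->dts = packet->pts; // assumes no B-frames, so dts == pts

    // av_interleaved_write_frame buffers and reorders packets as needed.
    return av_interleaved_write_frame(outputFormatContext, packet);
}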
void FFMpegManager::create(const QString &filePath, int formatId, const QStringList &paths, const QSize &size, int fps)
{
#ifdef HAVE_FFMPEG
	
	AVOutputFormat *fmt = guess_format(0, filePath.toLatin1().data(), 0);
	
	if ( !fmt )
	{
		fmt = guess_format("mpeg", NULL, NULL);
	}
	
// 	AVFormatParameters params, *ap = &params;
	
	switch(formatId)
	{
		case ExportInterface::ASF:
		{
			
		}
		break;
		case ExportInterface::AVI:
		{
			fmt->video_codec = CODEC_ID_MSMPEG4V3;
// 			video_st->codec.codec_tag = 0;
		}
		break;
		case ExportInterface::MOV:
		{
			
		}
		break;
		case ExportInterface::MPEG:
		{
		}
		break;
		case ExportInterface::RM:
		{
			
		}
		break;
		case ExportInterface::SWF:
		{
			
		}
		break;
		case ExportInterface::GIF:
		{
// 			AVImageFormat *imageFormat = guess_image_format(filePath.toLatin1().data());
// 			
// 			memset(ap, 0, sizeof(*ap));
// 			ap->image_format = imageFormat;
		}
		break;
		default: break;
	}
	
	AVFormatContext *oc = av_alloc_format_context();
	if ( !oc )
	{
		dError() << "Error while export";
		return;
	}

	
	oc->oformat = fmt;
	snprintf(oc->filename, sizeof(oc->filename), "%s", filePath.toLatin1().data());
	
	AVStream *video_st = addVideoStream(oc, fmt->video_codec, size.width(), size.height(), fps);
	
	if ( !video_st )
	{
		dError() << "Can't add video stream";
		return;
	}
	
	if (av_set_parameters(oc, 0) < 0)
	{
		dError() << "Invalid output format parameters";
		return;
	}
	
	dump_format(oc, 0, filePath.toLatin1().data(), 1);
	
	if (!openVideo(oc, video_st) )
	{
		dError() << "Can't open video";
		return;
	}
	
	if (!(fmt->flags & AVFMT_NOFILE))
	{
		if (url_fopen(&oc->pb, filePath.toLatin1().data(), URL_WRONLY) < 0) 
		{
			dError() << "Could not open " << filePath.toLatin1().data();
			return;
		}
	}
	
	av_write_header(oc);
	
	double video_pts = 0.0;
	
	foreach(QString imagePath, paths)
	{
		if (video_st)
		{
			video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
		}
		else
		{
			video_pts = 0.0;
		}
		if (!video_st || video_pts >= m_streamDuration )
		{
			break;
		}
		
		if (! writeVideoFrame(imagePath, oc, video_st, fps) )
		{
			break;
		}
	}
	
	closeVideo(oc, video_st);
	av_write_trailer(oc);
	
	for (unsigned int i = 0; i < oc->nb_streams; i++)
	{
		av_freep(&oc->streams[i]->codec);
		av_freep(&oc->streams[i]);
	}
	
	if (!(fmt->flags & AVFMT_NOFILE)) 
	{
		/* close the output file */
		url_fclose(&oc->pb);
	}
	
	av_free(oc);
#endif
}
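This exporter is written against the legacy muxing API (guess_format, av_set_parameters, url_fopen, av_write_header), which was removed in later FFmpeg releases. For comparison, a rough sketch of the same open/header/trailer/close flow with the current API (the one the next example uses); stream creation and the frame loop are elided, and export_modern() is just an illustrative name:

#include <libavformat/avformat.h>

// Illustrative sketch only; error handling is kept minimal on purpose.
static int export_modern(const char *path)
{
	AVFormatContext *oc = NULL;
	int ret = avformat_alloc_output_context2(&oc, NULL, NULL, path);
	if (ret < 0 || !oc)
		return ret;

	// ... create streams and open encoders here ...

	if (!(oc->oformat->flags & AVFMT_NOFILE))
	{
		ret = avio_open(&oc->pb, path, AVIO_FLAG_WRITE);	// replaces url_fopen
		if (ret < 0)
			goto end;
	}

	ret = avformat_write_header(oc, NULL);	// replaces av_set_parameters + av_write_header
	if (ret < 0)
		goto end;

	// ... encode frames and mux them with av_interleaved_write_frame() ...

	av_write_trailer(oc);

end:
	if (!(oc->oformat->flags & AVFMT_NOFILE))
		avio_closep(&oc->pb);	// replaces url_fclose
	avformat_free_context(oc);	// frees the streams too, so no manual av_freep loop
	return ret;
}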
Example #3
File: AV.cpp  Project: danzeeeman/roxlu
// Set up the libAV-related structs.
bool AV::setupAV() {

	ct.of = av_guess_format(NULL, "roxlu.flv", NULL);
	if(!ct.of) {
		printf("Cannot create flv AVOutputFormat\n");
		return false;
	}
	
	ct.c = avformat_alloc_context();
	if(!ct.c) {
		printf("Cannot allocate the AVFormatContext\n");
		return false;
	}
	ct.c->video_codec_id = CODEC_ID_H264;
	ct.c->debug = 3;
	ct.c->oformat = ct.of;

//	const char* output_filename = "tcp://127.0.0.1:6665";
//	const char* output_filename = "rtmp://gethinlewis.rtmphost.com";
	const char* output_filename = "rtmp://gethinlewis.rtmphost.com/event/_definst_";
//	const char* output_filename = "test.flv";
	snprintf(ct.c->filename, sizeof(ct.c->filename), "%s", output_filename);
	//ct.vs = addVideoStream(ct, ct.of->video_codec);

	printf("%d -- %d \n", CODEC_ID_H264, ct.of->video_codec);
	ct.of->video_codec = CODEC_ID_H264;
	ct.vs = addVideoStream(ct, ct.of->video_codec);
	if(!ct.vs) {
		printf("Cannot create video stream: %d.\n", ct.of->video_codec);
		return false;
	}
	
	if(!openVideo(ct)) {
		printf("Cannot open video stream.\n");
		return false;
	}
	
	//av_dict_set(&ct.c->metadata, "streamName", "video_test", 0);
	av_dict_set(&ct.c->metadata, "streamName", "livefeed", 0);
	
	if(use_audio) {
		bool use_mp3 = true;
		if(!use_mp3) {
			ct.asample_fmt = AV_SAMPLE_FMT_S16;
			ct.abit_rate = 64000;
			ct.asample_rate = 8000;
			ct.as = addAudioStream(ct, CODEC_ID_SPEEX);
		}
		else {
			ct.asample_fmt = AV_SAMPLE_FMT_S16;
			ct.abit_rate = 64000;
			ct.asample_rate = 44100;
			ct.as = addAudioStream(ct, CODEC_ID_MP3);
		}
		
		if(!ct.as) {
			printf("Cannot create audio stream.\n");
			return false;
		}
		
		if(!openAudio(ct)) {
			printf("Cannot open audio stream.\n");
			return false;
		}
	}
	
	av_dump_format(ct.c, 0, output_filename, 1);
	
	if(!(ct.of->flags & AVFMT_NOFILE)) {
		if(avio_open(&ct.c->pb, output_filename, AVIO_FLAG_WRITE) < 0) {
			printf("Cannot open: %s\n", output_filename);
			return false;
		}
	}
	if(avformat_write_header(ct.c, NULL) < 0) {
		printf("Cannot write header.\n");
		return false;
	}
	
	return true;
}
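setupAV() writes the header and then leaves the muxer running; a matching teardown would flush and close it. A possible counterpart, assuming the same ct struct (teardownAV() itself is not part of the original project):

// Possible teardown counterpart to setupAV(): flush the muxer, close the
// output, and free the context created above.
bool AV::teardownAV() {
	if(!ct.c) {
		return false;
	}

	// Flush any buffered packets and write the trailer.
	av_write_trailer(ct.c);

	// Close the output only if setupAV() opened one via avio_open().
	if(!(ct.of->flags & AVFMT_NOFILE)) {
		avio_close(ct.c->pb);
	}

	// avformat_free_context() also frees the streams added by addVideoStream/addAudioStream.
	avformat_free_context(ct.c);
	ct.c = NULL;
	return true;
}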