Example #1
static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, ReportList *reports)
{
	/* Handle to the output file */
	AVFormatContext* of;
	AVOutputFormat* fmt;
	char name[256];
	const char ** exts;

	ffmpeg_type = rd->ffcodecdata.type;
	ffmpeg_codec = rd->ffcodecdata.codec;
	ffmpeg_audio_codec = rd->ffcodecdata.audio_codec;
	ffmpeg_video_bitrate = rd->ffcodecdata.video_bitrate;
	ffmpeg_audio_bitrate = rd->ffcodecdata.audio_bitrate;
	ffmpeg_gop_size = rd->ffcodecdata.gop_size;
	ffmpeg_autosplit = rd->ffcodecdata.flags
		& FFMPEG_AUTOSPLIT_OUTPUT;
	
	do_init_ffmpeg();

	/* Determine the correct filename */
	filepath_ffmpeg(name, rd);
	fprintf(stderr, "Starting output to %s(ffmpeg)...\n"
		"  Using type=%d, codec=%d, audio_codec=%d,\n"
		"  video_bitrate=%d, audio_bitrate=%d,\n"
		"  gop_size=%d, autosplit=%d\n"
		"  render width=%d, render height=%d\n", 
		name, ffmpeg_type, ffmpeg_codec, ffmpeg_audio_codec,
		ffmpeg_video_bitrate, ffmpeg_audio_bitrate,
		ffmpeg_gop_size, ffmpeg_autosplit, rectx, recty);
	
	exts = get_file_extensions(ffmpeg_type);
	if (!exts) {
		BKE_report(reports, RPT_ERROR, "No valid formats found.");
		return 0;
	}
	fmt = av_guess_format(NULL, exts[0], NULL);
	if (!fmt) {
		BKE_report(reports, RPT_ERROR, "No valid formats found.");
		return 0;
	}

	of = avformat_alloc_context();
	if (!of) {
		BKE_report(reports, RPT_ERROR, "Error opening output file");
		return 0;
	}
	
	of->oformat = fmt;
	of->packet_size = rd->ffcodecdata.mux_packet_size;
	if (ffmpeg_audio_codec != CODEC_ID_NONE) {
		of->mux_rate = rd->ffcodecdata.mux_rate;
	} else {
		of->mux_rate = 0;
	}

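	/* Mux preload and maximum delay, in AV_TIME_BASE units (microseconds) */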
	of->preload = (int)(0.5*AV_TIME_BASE);
	of->max_delay = (int)(0.7*AV_TIME_BASE);

	fmt->audio_codec = ffmpeg_audio_codec;

	BLI_snprintf(of->filename, sizeof(of->filename), "%s", name);
	/* set the codec to the user's selection */
	switch(ffmpeg_type) {
	case FFMPEG_AVI:
	case FFMPEG_MOV:
	case FFMPEG_MKV:
		fmt->video_codec = ffmpeg_codec;
		break;
	case FFMPEG_OGG:
		fmt->video_codec = CODEC_ID_THEORA;
		break;
	case FFMPEG_DV:
		fmt->video_codec = CODEC_ID_DVVIDEO;
		break;
	case FFMPEG_MPEG1:
		fmt->video_codec = CODEC_ID_MPEG1VIDEO;
		break;
	case FFMPEG_MPEG2:
		fmt->video_codec = CODEC_ID_MPEG2VIDEO;
		break;
	case FFMPEG_H264:
		fmt->video_codec = CODEC_ID_H264;
		break;
	case FFMPEG_XVID:
		fmt->video_codec = CODEC_ID_MPEG4;
		break;
	case FFMPEG_FLV:
		fmt->video_codec = CODEC_ID_FLV1;
		break;
	case FFMPEG_MP3:
		fmt->audio_codec = CODEC_ID_MP3;
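		/* fall through: both MP3 and WAV are audio-only, so the video codec is set to NONE */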
	case FFMPEG_WAV:
		fmt->video_codec = CODEC_ID_NONE;
		break;
	case FFMPEG_MPEG4:
	default:
		fmt->video_codec = CODEC_ID_MPEG4;
		break;
	}
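	/* DV only supports fixed frame sizes: 720x480 (NTSC) or 720x576 (PAL) */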
	if (fmt->video_codec == CODEC_ID_DVVIDEO) {
		if (rectx != 720) {
			BKE_report(reports, RPT_ERROR, "Render width has to be 720 pixels for DV!");
			return 0;
		}
		if (rd->frs_sec != 25 && recty != 480) {
			BKE_report(reports, RPT_ERROR, "Render height has to be 480 pixels for DV-NTSC!");
			return 0;
		}
		if (rd->frs_sec == 25 && recty != 576) {
			BKE_report(reports, RPT_ERROR, "Render height has to be 576 pixels for DV-PAL!");
			return 0;
		}
	}
	
	if (ffmpeg_type == FFMPEG_DV) {
		fmt->audio_codec = CODEC_ID_PCM_S16LE;
		if (ffmpeg_audio_codec != CODEC_ID_NONE && (rd->ffcodecdata.audio_mixrate != 48000 || rd->ffcodecdata.audio_channels != 2)) {
			BKE_report(reports, RPT_ERROR, "FFMPEG only supports 48khz / stereo audio for DV!");
			return 0;
		}
	}
	
	if (fmt->video_codec != CODEC_ID_NONE) {
		video_stream = alloc_video_stream(rd, fmt->video_codec, of, rectx, recty);
		printf("alloc video stream %p\n", video_stream);
		if (!video_stream) {
			BKE_report(reports, RPT_ERROR, "Error initializing video stream.");
			return 0;
		}
	}

	if (ffmpeg_audio_codec != CODEC_ID_NONE) {
		audio_stream = alloc_audio_stream(rd, fmt->audio_codec, of);
		if (!audio_stream) {
			BKE_report(reports, RPT_ERROR, "Error initializing audio stream.");
			return 0;
		}
	}
	if (av_set_parameters(of, NULL) < 0) {
		BKE_report(reports, RPT_ERROR, "Error setting output parameters.");
		return 0;
	}
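	/* Unless the muxer handles its own I/O (AVFMT_NOFILE), open the output file for writing */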
	if (!(fmt->flags & AVFMT_NOFILE)) {
		if (avio_open(&of->pb, name, AVIO_FLAG_WRITE) < 0) {
			BKE_report(reports, RPT_ERROR, "Could not open file for writing.");
			return 0;
		}
	}

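	/* Write the container header; this typically fails on unsupported codec/container combinations */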
	if (av_write_header(of) < 0) {
		BKE_report(reports, RPT_ERROR, "Could not initialize streams. Probably unsupported codec combination.");
		return 0;
	}

	outfile = of;
	av_dump_format(of, 0, name, 1);

	return 1;
}
Example #2
static ImBuf * avi_fetchibuf (struct anim *anim, int position) {
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) return (NULL);

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect);
				/* Oh brother... */
			}
		}
	} else {
#else
	if (1) {
#endif
		ibuf = IMB_allocImBuf (anim->x, anim->y, 24, IB_rect);

		tmp = AVI_read_frame (anim->avi, AVI_FORMAT_RGB32, position,
			AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf ("Error reading frame from AVI\n");
			IMB_freeImBuf (ibuf);
			return NULL;
		}

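		/* Copy the frame row by row, flipping it vertically into the ImBuf */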
		for (y=0; y < anim->y; y++) {
			memcpy (&(ibuf->rect)[((anim->y-y)-1)*anim->x],  &tmp[y*anim->x],  
				anim->x * 4);
		}
		
		MEM_freeN (tmp);
	}
	
	ibuf->profile = IB_PROFILE_SRGB;
	
	return ibuf;
}

#ifdef WITH_FFMPEG

extern void do_init_ffmpeg(void);

static int startffmpeg(struct anim * anim) {
	int            i, videoStream;

	AVCodec *pCodec;
	AVFormatContext *pFormatCtx;
	AVCodecContext *pCodecCtx;
	int frs_num;
	double frs_den;
	int streamcount;

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* The following for color space determination */
	int srcRange, dstRange, brightness, contrast, saturation;
	int *table;
	const int *inv_table;
#endif

	if (anim == 0) return(-1);

	streamcount = anim->streamindex;

	do_init_ffmpeg();

	if(av_open_input_file(&pFormatCtx, anim->name, NULL, 0, NULL)!=0) {
		return -1;
	}

	if(av_find_stream_info(pFormatCtx)<0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	av_dump_format(pFormatCtx, 0, anim->name, 0);


	/* Find the video stream */
	videoStream = -1;

	for(i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type
		   == AVMEDIA_TYPE_VIDEO) {
			if (streamcount > 0) {
				streamcount--;
				continue;
			}
			videoStream = i;
			break;
		}

	if(videoStream==-1) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	/* Find the decoder for the video stream */
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec == NULL) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx->workaround_bugs = 1;

	if(avcodec_open(pCodecCtx, pCodec) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

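	/* Estimate the frame count from the container duration (in AV_TIME_BASE units) and the stream frame rate */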
	anim->duration = ceil(pFormatCtx->duration
		* av_q2d(pFormatCtx->streams[videoStream]->r_frame_rate) 
		/ AV_TIME_BASE);

	frs_num	= pFormatCtx->streams[videoStream]->r_frame_rate.num;
	frs_den = pFormatCtx->streams[videoStream]->r_frame_rate.den;

	frs_den *= AV_TIME_BASE;

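	/* Reduce the fraction by powers of 10 so frs_sec stays a small integer */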
	while (frs_num % 10 == 0 && frs_den >= 2.0 && frs_num > 10) {
		frs_num /= 10;
		frs_den /= 10;
	}

	anim->frs_sec = frs_num;
	anim->frs_sec_base = frs_den;

	anim->params = 0;

	anim->x = pCodecCtx->width;
	anim->y = pCodecCtx->height;
	anim->interlacing = 0;
	anim->orientation = 0;
	anim->framesize = anim->x * anim->y * 4;

	anim->curposition = -1;
	anim->last_frame = 0;
	anim->last_pts = -1;
	anim->next_pts = -1;
	anim->next_undecoded_pts = -1;
	anim->next_packet.stream_index = -1;

	anim->pFormatCtx = pFormatCtx;
	anim->pCodecCtx = pCodecCtx;
	anim->pCodec = pCodec;
	anim->videoStream = videoStream;

	anim->pFrame = avcodec_alloc_frame();
	anim->pFrameDeinterlaced = avcodec_alloc_frame();
	anim->pFrameRGB = avcodec_alloc_frame();

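	/* Sanity check: the code below assumes an RGBA frame is exactly 4 bytes per pixel */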
	if (avpicture_get_size(PIX_FMT_RGBA, anim->x, anim->y)
		!= anim->x * anim->y * 4) {
		fprintf (stderr,
			 "ffmpeg has changed alloc scheme ... ARGHHH!\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

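	/* For deinterlacing, allocate an extra picture buffer in the codec's pixel format */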
	if (anim->ib_flags & IB_animdeinterlace) {
		avpicture_fill((AVPicture*) anim->pFrameDeinterlaced, 
				   MEM_callocN(avpicture_get_size(
						   anim->pCodecCtx->pix_fmt,
						   anim->x, anim->y), 
					   "ffmpeg deinterlace"), 
				   anim->pCodecCtx->pix_fmt, anim->x, anim->y);
	}

	if (pCodecCtx->has_b_frames) {
		anim->preseek = 25; /* FIXME: detect gopsize ... */
	} else {
		anim->preseek = 0;
	}
	
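	/* swscale context that converts decoded frames to RGBA at the original resolution */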
	anim->img_convert_ctx = sws_getContext(
		anim->pCodecCtx->width,
		anim->pCodecCtx->height,
		anim->pCodecCtx->pix_fmt,
		anim->pCodecCtx->width,
		anim->pCodecCtx->height,
		PIX_FMT_RGBA,
		SWS_FAST_BILINEAR | SWS_PRINT_INFO,
		NULL, NULL, NULL);
		
	if (!anim->img_convert_ctx) {
		fprintf (stderr,
			 "Can't transform color space??? Bailing out...\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* Try do detect if input has 0-255 YCbCR range (JFIF Jpeg MotionJpeg) */
	if (!sws_getColorspaceDetails(anim->img_convert_ctx, (int**)&inv_table, &srcRange,
		&table, &dstRange, &brightness, &contrast, &saturation)) {

		srcRange = srcRange || anim->pCodecCtx->color_range == AVCOL_RANGE_JPEG;
		inv_table = sws_getCoefficients(anim->pCodecCtx->colorspace);

		if(sws_setColorspaceDetails(anim->img_convert_ctx, (int *)inv_table, srcRange,
			table, dstRange, brightness, contrast, saturation)) {

			printf("Warning: Could not set libswscale colorspace details.\n");
		}
	}
	else {
		printf("Warning: Could not get libswscale colorspace details.\n");
	}
#endif
		
	return (0);
}
Example #3
static int isffmpeg (const char *filename)
{
	AVFormatContext *pFormatCtx;
	unsigned int i;
	int videoStream;
	AVCodec *pCodec;
	AVCodecContext *pCodecCtx;

	do_init_ffmpeg();

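	/* Reject extensions that are handled by other readers (still images, SWF, WAV) */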
	if( BLI_testextensie(filename, ".swf") ||
		BLI_testextensie(filename, ".jpg") ||
		BLI_testextensie(filename, ".png") ||
		BLI_testextensie(filename, ".dds") ||
		BLI_testextensie(filename, ".tga") ||
		BLI_testextensie(filename, ".bmp") ||
		BLI_testextensie(filename, ".exr") ||
		BLI_testextensie(filename, ".cin") ||
		BLI_testextensie(filename, ".wav")) return 0;

	if(av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL)!=0) {
		if(UTIL_DEBUG) fprintf(stderr, "isffmpeg: av_open_input_file failed\n");
		return 0;
	}

	if(av_find_stream_info(pFormatCtx)<0) {
		if(UTIL_DEBUG) fprintf(stderr, "isffmpeg: av_find_stream_info failed\n");
		av_close_input_file(pFormatCtx);
		return 0;
	}

	if(UTIL_DEBUG) av_dump_format(pFormatCtx, 0, filename, 0);


		/* Find the first video stream */
	videoStream=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++)
		if(pFormatCtx->streams[i] &&
		   pFormatCtx->streams[i]->codec && 
		  (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO))
		{
			videoStream=i;
			break;
		}

	if(videoStream==-1) {
		av_close_input_file(pFormatCtx);
		return 0;
	}

	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

		/* Find the decoder for the video stream */
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL) {
		av_close_input_file(pFormatCtx);
		return 0;
	}

	if(avcodec_open(pCodecCtx, pCodec)<0) {
		av_close_input_file(pFormatCtx);
		return 0;
	}

	avcodec_close(pCodecCtx);
	av_close_input_file(pFormatCtx);

	return 1;
}