Example #1
/* Convert one rendered RGBA frame from `pixels` into the AVFrame that will
 * be handed to the encoder.  The image is flipped vertically during the
 * copy; on big-endian machines the bytes of each 32-bit pixel are swapped
 * as well.  If the codec does not consume BGR32 directly, the pixels go
 * through a temporary BGR32 frame and libswscale converts them into
 * `context->current_frame`.  Returns `context->current_frame`, or NULL if
 * the temporary frame could not be allocated (reported via `reports`). */
static AVFrame *generate_video_frame(FFMpegContext *context, uint8_t *pixels, ReportList *reports)
{
	AVCodecContext *c = context->video_stream->codec;
	int width = c->width;
	int height = c->height;
	AVFrame *rgb_frame;
	int row;

	/* When the codec takes BGR32 as-is we can write straight into the
	 * output frame; otherwise stage the pixels in a temporary frame. */
	if (c->pix_fmt == PIX_FMT_BGR32) {
		rgb_frame = context->current_frame;
	}
	else {
		rgb_frame = alloc_picture(PIX_FMT_BGR32, width, height);
		if (!rgb_frame) {
			BKE_report(reports, RPT_ERROR, "Could not allocate temporary frame");
			return NULL;
		}
	}

	/* Copy row by row, bottom-up, so the image ends up flipped.
	 * NOTE(review): this assumes rgb_frame's rows are packed with a stride
	 * of exactly width * 4 bytes (linesize is not consulted) -- confirm
	 * against alloc_picture(). */
	for (row = 0; row < height; row++) {
		uint8_t *dst = rgb_frame->data[0] + width * 4 * (height - row - 1);
		const uint8_t *sp = pixels + width * 4 * row;
		int x;

		if (ENDIAN_ORDER == L_ENDIAN) {
			/* Little-endian: byte order already matches, plain copy. */
			for (x = 0; x < width * 4; x++) {
				dst[x] = sp[x];
			}
		}
		else {
			/* Big-endian: reverse the four bytes of every pixel. */
			for (x = 0; x < width * 4; x += 4) {
				dst[x + 0] = sp[x + 3];
				dst[x + 1] = sp[x + 2];
				dst[x + 2] = sp[x + 1];
				dst[x + 3] = sp[x + 0];
			}
		}
	}

	/* Convert the staged BGR32 data into the codec's pixel format and
	 * drop the temporary frame again. */
	if (c->pix_fmt != PIX_FMT_BGR32) {
		sws_scale(context->img_convert_ctx, (const uint8_t *const *) rgb_frame->data,
		          rgb_frame->linesize, 0, c->height,
		          context->current_frame->data, context->current_frame->linesize);
		delete_picture(rgb_frame);
	}

	/* NOTE(review): format is tagged BGR32 even on the conversion path,
	 * where the data is actually in c->pix_fmt -- confirm this is what the
	 * encoder expects. */
	context->current_frame->format = PIX_FMT_BGR32;
	context->current_frame->width = width;
	context->current_frame->height = height;

	return context->current_frame;
}
Example #2
/* Tear down the global ffmpeg encoding session: flush frames the encoder
 * still buffers, write the container trailer, close the video codec and
 * the output file, and release every global buffer acquired at start.
 * The order matters: delayed frames must be flushed and the trailer
 * written before any stream or the output context is freed. */
void end_ffmpeg(void)
{
	unsigned int i;

	fprintf(stderr, "Closing ffmpeg...\n");

#if 0
	if (audio_stream) { /* SEE UPPER */
		write_audio_frames();
	}
#endif

#ifdef WITH_AUDASPACE
	if (audio_mixdown_device) {
		AUD_closeReadDevice(audio_mixdown_device);
		audio_mixdown_device = 0;
	}
#endif

	/* Ask the codec for frames it is still holding back (B-frames etc.)
	 * before the trailer is written. */
	if (video_stream && video_stream->codec) {
		fprintf(stderr, "Flushing delayed frames...\n");
		flush_ffmpeg();
	}

	if (outfile) {
		av_write_trailer(outfile);
	}

	/* Close the video codec */
	if (video_stream && video_stream->codec) {
		avcodec_close(video_stream->codec);
		printf("zero video stream %p\n", video_stream);
		video_stream = 0;
	}

	/* Free the streams of the output file.  The old guard
	 * `if (&outfile->streams[i])` took the address of the array slot and
	 * was therefore always true; av_freep() copes with NULL itself. */
	if (outfile) {
		for (i = 0; i < outfile->nb_streams; i++) {
			av_freep(&outfile->streams[i]);
		}
	}

	/* free the temp buffer */
	if (current_frame) {
		delete_picture(current_frame);
		current_frame = 0;
	}

	/* Close the output file, unless the format does not write to one. */
	if (outfile && outfile->oformat) {
		if (!(outfile->oformat->flags & AVFMT_NOFILE)) {
			avio_close(outfile->pb);
		}
	}
	if (outfile) {
		av_free(outfile);
		outfile = 0;
	}
	if (video_buffer) {
		MEM_freeN(video_buffer);
		video_buffer = 0;
	}
	if (audio_output_buffer) {
		av_free(audio_output_buffer);
		audio_output_buffer = 0;
	}
	if (audio_input_buffer) {
		av_free(audio_input_buffer);
		audio_input_buffer = 0;
	}

	if (img_convert_ctx) {
		sws_freeContext(img_convert_ctx);
		img_convert_ctx = 0;
	}
}
Example #3
/* Shut down the ffmpeg encoding session held in `context`: flush delayed
 * frames, write the container trailer, close the codec and output file,
 * and release all buffers.  When `is_autosplit` is non-zero the audio
 * mixdown device is left open so the next split file can keep using it.
 * The teardown order is significant: flushing and the trailer must happen
 * before the output context is freed. */
static void end_ffmpeg_impl(FFMpegContext *context, int is_autosplit)
{
	PRINT("Closing ffmpeg...\n");

#if 0
	if (context->audio_stream) { /* SEE UPPER */
		write_audio_frames(context);
	}
#endif

#ifdef WITH_AUDASPACE
	/* Keep the mixdown device alive across autosplit boundaries. */
	if (is_autosplit == false && context->audio_mixdown_device) {
		AUD_Device_free(context->audio_mixdown_device);
		context->audio_mixdown_device = NULL;
	}
#endif

	/* Drain frames the encoder is still holding back (B-frames etc.). */
	if (context->video_stream && context->video_stream->codec) {
		PRINT("Flushing delayed frames...\n");
		flush_ffmpeg(context);
	}

	if (context->outfile) {
		av_write_trailer(context->outfile);
	}

	/* Release the video codec. */
	if (context->video_stream && context->video_stream->codec) {
		avcodec_close(context->video_stream->codec);
		PRINT("zero video stream %p\n", context->video_stream);
		context->video_stream = NULL;
	}

	/* Scratch frame used for pixel-format conversion. */
	if (context->current_frame) {
		delete_picture(context->current_frame);
		context->current_frame = NULL;
	}

	/* Close the container file itself, unless the format writes nowhere,
	 * then free the muxer context. */
	if (context->outfile && context->outfile->oformat &&
	    !(context->outfile->oformat->flags & AVFMT_NOFILE))
	{
		avio_close(context->outfile->pb);
	}
	if (context->outfile) {
		avformat_free_context(context->outfile);
		context->outfile = NULL;
	}

	if (context->audio_input_buffer) {
		av_free(context->audio_input_buffer);
		context->audio_input_buffer = NULL;
	}

#ifndef FFMPEG_HAVE_ENCODE_AUDIO2
	if (context->audio_output_buffer) {
		av_free(context->audio_output_buffer);
		context->audio_output_buffer = NULL;
	}
#endif

	if (context->audio_deinterleave_buffer) {
		av_free(context->audio_deinterleave_buffer);
		context->audio_deinterleave_buffer = NULL;
	}

	if (context->img_convert_ctx) {
		sws_freeContext(context->img_convert_ctx);
		context->img_convert_ctx = NULL;
	}
}
Example #4
/* Tear down the global ffmpeg encoding session: flush delayed frames,
 * write the container trailer, close the codec and output file, and free
 * all global buffers.  When `is_autosplit` is non-zero the audio mixdown
 * device stays open so the next split file can keep using it.  The order
 * matters: flushing and the trailer must precede freeing the streams and
 * the output context. */
static void end_ffmpeg_impl(int is_autosplit)
{
	unsigned int i;

	PRINT("Closing ffmpeg...\n");

#if 0
	if (audio_stream) { /* SEE UPPER */
		write_audio_frames();
	}
#endif

#ifdef WITH_AUDASPACE
	if (is_autosplit == false) {
		if (audio_mixdown_device) {
			AUD_closeReadDevice(audio_mixdown_device);
			audio_mixdown_device = 0;
		}
	}
#endif

	/* Ask the codec for frames it is still holding back (B-frames etc.)
	 * before the trailer is written. */
	if (video_stream && video_stream->codec) {
		PRINT("Flushing delayed frames...\n");
		flush_ffmpeg();
	}

	if (outfile) {
		av_write_trailer(outfile);
	}

	/* Close the video codec */
	if (video_stream && video_stream->codec) {
		avcodec_close(video_stream->codec);
		PRINT("zero video stream %p\n", video_stream);
		video_stream = 0;
	}

	/* Free the streams of the output file.  The old guard
	 * `if (&outfile->streams[i])` took the address of the array slot and
	 * was therefore always true; av_freep() copes with NULL itself. */
	if (outfile) {
		for (i = 0; i < outfile->nb_streams; i++) {
			av_freep(&outfile->streams[i]);
		}
	}

	/* free the temp buffer */
	if (current_frame) {
		delete_picture(current_frame);
		current_frame = 0;
	}

	/* Close the output file, unless the format does not write to one. */
	if (outfile && outfile->oformat) {
		if (!(outfile->oformat->flags & AVFMT_NOFILE)) {
			avio_close(outfile->pb);
		}
	}
	if (outfile) {
		av_free(outfile);
		outfile = 0;
	}
	if (audio_input_buffer) {
		av_free(audio_input_buffer);
		audio_input_buffer = 0;
	}
#ifndef FFMPEG_HAVE_ENCODE_AUDIO2
	if (audio_output_buffer) {
		av_free(audio_output_buffer);
		audio_output_buffer = 0;
	}
#endif

	if (audio_deinterleave_buffer) {
		av_free(audio_deinterleave_buffer);
		audio_deinterleave_buffer = 0;
	}

	if (img_convert_ctx) {
		sws_freeContext(img_convert_ctx);
		img_convert_ctx = 0;
	}
}