예제 #1
0
파일: sequence.c 프로젝트: flv0/veejay
/* Request one preview image from the veejay server and convert it to BGR24.
 *
 * Sends VIMS_RGB24_IMAGE for the track's current geometry, receives the raw
 * planar YUV (or grayscale) payload into v->data_buffer, validates its size,
 * and color-converts it into v->tmp_buffer.
 *
 * Returns: the received byte count on success; 0 on a size mismatch; the
 * (<= 0) transport result when send/receive fails; 1 when there is neither
 * a cached frame nor a valid geometry to request.
 * NOTE(review): the mixed meaning of the return value (byte count vs. 1 vs.
 * res) is assumed to be what callers expect — verify against call sites.
 */
static	int	veejay_get_image_data(veejay_preview_t *vp, veejay_track_t *v )
{
	/* Nothing cached and no usable width/height: cannot issue a request. */
	if(!v->have_frame && (v->width <= 0 || v->height <= 0) )
		return 1;

	/* Ask the server for a preview frame of the track's dimensions. */
	gint res = sendvims( v, VIMS_RGB24_IMAGE, "%d %d", v->width,v->height );
	if( res <= 0 )
	{
		v->have_frame = 0;
		return res;
	}
	gint bw = 0;	/* bytes actually received */

	res = recvvims( v, 7, &bw, v->data_buffer );
	if( res <= 0 || bw <= 0 )
	{
		veejay_msg(VEEJAY_MSG_WARNING, "Can't get a preview image! Only got %d bytes", bw);
		v->have_frame = 0;
		return res;
	}

	/* Expected payload size: luma plane, plus two quarter-size chroma
	   planes unless the track is grayscale (GRAY8 = luma only). */
	int expected_len = (v->width * v->height);
	int srcfmt = PIX_FMT_YUVJ420P; /* default source format: full-range 4:2:0 */
        if(v->grey_scale) {
		srcfmt = PIX_FMT_GRAY8;
	}
	else {
		expected_len += (v->width*v->height/4);
		expected_len += (v->width*v->height/4);
	}

	if( bw != expected_len )
	{
		veejay_msg(VEEJAY_MSG_WARNING, "Corrupted image. Should be %dx%d but have %d bytes %s",
			v->width,v->height,abs(bw - expected_len),( (bw-expected_len<0)? "too few" : "too many") );
		v->have_frame = 0;
		return 0;
	}

	uint8_t *in = v->data_buffer;
	uint8_t *out = v->tmp_buffer;
	
	v->bw = 0;
	
	/* Build source/destination frame descriptors. The U/V plane pointers
	   are always computed with 4:2:0 offsets; for GRAY8 they point past
	   the real data — presumably unused by the converter in that case.
	   NOTE(review): confirm yuv_convert_any_ac ignores chroma for GRAY8. */
	VJFrame *src1 = yuv_yuv_template( in, in + (v->width * v->height), in + (v->width * v->height) + (v->width*v->height)/4,v->width,v->height, srcfmt );
	VJFrame *dst1 = yuv_rgb_template( out, v->width,v->height, PIX_FMT_BGR24 );

	yuv_convert_any_ac( src1, dst1, src1->format, dst1->format );	

	v->have_frame = 1;

	/* Templates are shallow descriptors; freeing them does not free the
	   pixel buffers they point into. */
	free(src1);
	free(dst1);

	return bw;
}
예제 #2
0
파일: vj-net.c 프로젝트: flv0/veejay
/* Copy/convert the network thread's most recent frame into buffer[0..2].
 *
 * Takes the thread lock, verifies a frame is available, then scales and
 * color-converts t->buf (format t->in_fmt, t->in_w x t->in_h) into the
 * caller's planes (format t->f, t->w x t->h) via a cached swscaler context.
 *
 * Returns 1 in all cases (both "no frame yet" and "frame delivered").
 * NOTE(review): t->a, t->b and t->scaler are created once and reused; this
 * assumes t->buf, the caller's buffer[] pointers and all geometry stay
 * stable for the lifetime of the stream — verify against callers.
 */
int	net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3] )
{
	threaded_t *t = (threaded_t*) tag->priv;
	
	int state = 0;

	/* frame ready ? */
	lock(t);
	state = t->state;
	if( state == 0 || t->bufsize == 0 || t->buf == NULL ) {
		unlock(t);
		return 1; // not active or no frame
	}	// just continue when t->have_frame == 0

	//@ color space convert frame	
	/* Chroma plane size depends on subsampling: 4:2:0 uses quarter-size
	   planes, anything else here is treated as half-size (4:2:2). */
	int b_len = t->in_w * t->in_h;
	int buvlen = b_len;

	if( PIX_FMT_YUV420P == t->in_fmt || PIX_FMT_YUVJ420P == t->in_fmt )
		buvlen = b_len/4;
	else
		buvlen = b_len/2;
	
	/* Lazily build the source frame descriptor over the receive buffer. */
	if( t->a == NULL )
		t->a = yuv_yuv_template( t->buf, t->buf + b_len, t->buf+ b_len+ buvlen,t->in_w,t->in_h, t->in_fmt);
	
	/* Lazily build the destination descriptor over the caller's planes. */
	if( t->b == NULL ) 
		t->b = yuv_yuv_template( buffer[0],buffer[1], buffer[2],t->w,t->h,t->f);
	
	/* Lazily create the scaler once source/destination are known. */
	if( t->scaler == NULL ) {
		sws_template sws_templ;
		memset( &sws_templ, 0, sizeof(sws_template));
		sws_templ.flags = yuv_which_scaler();
		t->scaler = yuv_init_swscaler( t->a,t->b, &sws_templ, yuv_sws_get_cpu_flags() );
	}

	yuv_convert_and_scale( t->scaler, t->a,t->b );
	
	/* Mark the frame consumed so the reader thread can refill t->buf. */
	t->have_frame = 0;
	unlock(t);

	return 1;
}
예제 #3
0
파일: vj-sdl.c 프로젝트: c0ntrol/veejay
vj_sdl *vj_sdl_allocate(int width, int height, int fmt, int use_key, int use_mouse, int show_cursor)
{
    vj_sdl *vjsdl = (vj_sdl *) vj_malloc(sizeof(vj_sdl));
    if (!vjsdl)
	return NULL;

    veejay_memset( vjsdl,0,sizeof(vj_sdl));

    vjsdl->flags[0] = 0;
    vjsdl->flags[1] = 0;
//   vjsdl->mouse_motion = 1;
  //  vjsdl->use_keyboard = 1;
    vjsdl->use_keyboard = use_key;
    vjsdl->mouse_motion = use_mouse;
    vjsdl->show_cursor = show_cursor;
    vjsdl->pix_format = SDL_YUY2_OVERLAY; 
    vjsdl->pix_fmt = fmt;
    vjsdl->width = width;
    vjsdl->height = height;
    vjsdl->frame_size = width * height;
    vjsdl->sw_scale_width = 0;
    vjsdl->sw_scale_height = 0;
    vjsdl->custom_geo[0] = -1;
    vjsdl->custom_geo[1] = -1;
    vjsdl->display = NULL;
	switch(fmt) {
	 //@ dont use YUVJ here - on blitting it to SDL it will be converted to YUV clamped for YUYJ422
	case FMT_422F:vjsdl->ffmpeg_pixfmt = PIX_FMT_YUV422P;break;
	case FMT_422:vjsdl->ffmpeg_pixfmt = PIX_FMT_YUV422P;break;
	}
    sws_template templ;	
    memset(&templ,0,sizeof(sws_template));
    templ.flags = yuv_which_scaler();
    VJFrame *src = yuv_yuv_template( NULL,NULL,NULL,vjsdl->width,vjsdl->height, vjsdl->ffmpeg_pixfmt );
    VJFrame *dst = yuv_yuv_template(  NULL,NULL,NULL,vjsdl->width,vjsdl->height,PIX_FMT_YUYV422);
    vjsdl->scaler = yuv_init_swscaler( src,dst, &templ, yuv_sws_get_cpu_flags() );

    vjsdl->src_frame = (void*) src;
    vjsdl->dst_frame = (void*) dst;

    return vjsdl;
}
예제 #4
0
파일: vj-misc.c 프로젝트: flv0/veejay
/* Encode the given YUV planes as a JPEG and write them to disk.
 *
 * If the output pixel format is not vertically subsampled (shift_v == 0),
 * the source is first converted to 4:2:0 into a temporary buffer; otherwise
 * the source planes are used directly. The filename defaults to
 * "<frame>.jpg" when info->uc->filename is unset.
 *
 * Returns the number of fwrite items written (1 on success, 0 when nothing
 * was written), or -1 on allocation failure.
 */
int vj_perform_screenshot2(veejay_t * info, uint8_t ** src)
{
    FILE *frame;
    int res = 0;
    uint8_t *jpeg_buff;
    VJFrame tmp;
    int jpeg_size;

    video_playback_setup *settings = info->settings;

    jpeg_buff = (uint8_t *) malloc( 65535 * 10);
    if (!jpeg_buff)
		return -1;

    vj_get_yuv_template( &tmp,
				info->video_output_width,
				info->video_output_height,
				info->pixel_format );

    if( tmp.shift_v == 0 )
    {
	/* Output format has no vertical chroma subsampling: convert the
	   4:2:2 source into a freshly allocated 4:2:0 buffer for the JPEG
	   encoder. 3 * len is enough for Y + offset U/V planes below. */
	tmp.data[0] = (uint8_t*) vj_malloc(sizeof(uint8_t) * tmp.len * 3);
	if (!tmp.data[0]) {
		/* BUGFIX: allocation failure was previously unchecked and
		   led to writing through NULL plane pointers. */
		free(jpeg_buff);
		return -1;
	}
	tmp.data[1] = tmp.data[0] + tmp.len;
	tmp.data[2] = tmp.data[1] + tmp.len + tmp.uv_len;

	tmp.format = PIX_FMT_YUVJ420P;
	
	VJFrame *srci = yuv_yuv_template( src[0],src[1],src[2], info->video_output_width,
					info->video_output_height , PIX_FMT_YUVJ422P);

	yuv_convert_any_ac( srci,&tmp, srci->format, tmp.format );

    	free(srci);
    }
    else
    {
	/* Formats with vertical subsampling can be encoded in place. */
	tmp.data[0] = src[0];
	tmp.data[1] = src[1];
	tmp.data[2] = src[2];
    }	

	if(info->uc->filename == NULL) 
	{
		/* BUGFIX: 12 bytes only fits "%06d.jpg" for frame numbers up
		   to 999999; larger frames overflowed the buffer. Size for
		   any int and use snprintf to bound the write. */
		info->uc->filename = (char*) malloc(sizeof(char) * 32); 
		if( info->uc->filename != NULL )
			snprintf(info->uc->filename, 32, "%06d.jpg", info->settings->current_frame_num );
	}
    frame = (info->uc->filename != NULL) ? fopen(info->uc->filename, "wb") : NULL;

    if (frame)
    {	
    	jpeg_size = encode_jpeg_raw(jpeg_buff, (65535*10), 100,
				settings->dct_method,  
				info->current_edit_list->video_inter,0,
				info->video_output_width,
				info->video_output_height,
				tmp.data[0],
				tmp.data[1], tmp.data[2]);

	/* BUGFIX: only write when the encoder produced data; a negative
	   jpeg_size was previously passed straight to fwrite. */
	if( jpeg_size > 0 )
		res = fwrite(jpeg_buff, jpeg_size, 1, frame);
   	fclose(frame);
    	if(res) 
		veejay_msg(VEEJAY_MSG_INFO, "Dumped frame to %s", info->uc->filename);
    }

    free(jpeg_buff);	/* free(NULL) is a no-op; guard removed */

    if( tmp.shift_v == 0 )
    {
	free(tmp.data[0]);
    }

    return res;
}
예제 #5
0
파일: avhelper.c 프로젝트: flv0/veejay
/* Open a media file with FFmpeg, find its first supported video stream,
 * decode one frame to validate the stream, and build a decoder context
 * (codec, reusable AVFrame and swscaler) that converts decoded frames to
 * dst_pixfmt at dst_width x dst_height.
 *
 * Pass dst_width == -1 and dst_height == -1 to keep the native size.
 *
 * Returns an opaque el_decoder_t* on success, NULL on any failure; on
 * failure all partially acquired resources are released here.
 */
void	*avhelper_get_decoder( const char *filename, int dst_pixfmt, int dst_width, int dst_height ) {
	char errbuf[512];
	el_decoder_t *x = (el_decoder_t*) vj_calloc( sizeof( el_decoder_t ));
	if(!x) {
		return NULL;
	}

#if LIBAVCODEC_BUILD > 5400
	int err = avformat_open_input( &(x->avformat_ctx), filename, NULL, NULL );
#else
	int err = av_open_input_file( &(x->avformat_ctx),filename,NULL,0,NULL );
#endif

	if(err < 0 ) {
		av_strerror( err, errbuf, sizeof(errbuf));
		veejay_msg(VEEJAY_MSG_DEBUG, "%s: %s", filename,errbuf );
		free(x);
		return NULL;
	}

#if LIBAVCODEC_BUILD > 5400
	/* avformat_find_stream_info leaks memory */
	err = avformat_find_stream_info( x->avformat_ctx, NULL );
#else
	err = av_find_stream_info( x->avformat_ctx );
#endif
	if( err < 0 ) {
		av_strerror( err, errbuf, sizeof(errbuf));
		veejay_msg(VEEJAY_MSG_DEBUG, "%s: %s" ,filename,errbuf );
	}

	if(err < 0 ) {
		avhelper_close_input_file( x->avformat_ctx );
		free(x);
		return NULL;
	}
	
	unsigned int i,j;
	unsigned int n = x->avformat_ctx->nb_streams;
	int vi = -1;	/* index of the selected video stream, -1 = none */

	/* Scan streams for the first video stream whose codec is in our
	   supported-codec table and for which a decoder is available. */
	for( i = 0; i < n; i ++ )
	{
		if( !x->avformat_ctx->streams[i]->codec )
			continue;

		if( x->avformat_ctx->streams[i]->codec->codec_type > CODEC_ID_FIRST_SUBTITLE ) 
			continue;
		
		if( x->avformat_ctx->streams[i]->codec->codec_type < CODEC_ID_FIRST_AUDIO )
		{
				int sup_codec = 0;
				for( j = 0; _supported_codecs[j].name != NULL; j ++ ) {
					if( x->avformat_ctx->streams[i]->codec->codec_id == _supported_codecs[j].id ) {
						sup_codec = 1;
						goto further;
					}
				}	
further:
				if( !sup_codec ) {
					avhelper_close_input_file( x->avformat_ctx );
					free(x);
					return NULL;
				}
				x->codec = avcodec_find_decoder( x->avformat_ctx->streams[i]->codec->codec_id );
				if(x->codec == NULL ) 
				{
					avhelper_close_input_file( x->avformat_ctx );
					free(x);
					return NULL;
				}
				vi = i;

				veejay_msg(VEEJAY_MSG_DEBUG, "FFmpeg: video stream %d, codec_id %d", vi, x->avformat_ctx->streams[i]->codec->codec_id);

				break;
		}
	}

	if( vi == -1 ) {
		veejay_msg(VEEJAY_MSG_DEBUG, "FFmpeg: No video streams found");
		avhelper_close_input_file( x->avformat_ctx );
		free(x);
		return NULL;
	}

	x->codec_ctx = x->avformat_ctx->streams[vi]->codec;

	int wid = dst_width;
	int hei = dst_height;

	/* -1/-1 means "native size": inherit the codec's dimensions. */
	if( wid == -1 && hei == -1 ) {
		wid = x->codec_ctx->width;
		hei = x->codec_ctx->height;
	}

#if LIBAVCODEC_BUILD > 5400
	if ( avcodec_open2( x->codec_ctx, x->codec, NULL ) < 0 )
#else
	if ( avcodec_open( x->codec_ctx, x->codec ) < 0 ) 
#endif
	{
		avhelper_close_input_file( x->avformat_ctx );
		free(x);
		return NULL;
	}

	veejay_memset( &(x->pkt), 0, sizeof(AVPacket));
	AVFrame *f = avcodec_alloc_frame();
	x->output = yuv_yuv_template( NULL,NULL,NULL, wid, hei, dst_pixfmt );

	/* Decode until we obtain one complete picture; this both validates
	   the stream and populates codec_ctx with the real pix_fmt/size. */
	int got_picture = 0;
	while(1) {
	    int ret = av_read_frame(x->avformat_ctx, &(x->pkt));
		if( ret < 0 )
			break;

		if ( x->pkt.stream_index == vi ) {
			avcodec_decode_video( x->codec_ctx,f,&got_picture, x->pkt.data, x->pkt.size );
			avhelper_frame_unref( f );
		}
				
		av_free_packet( &(x->pkt) );	

		if( got_picture )
			break;
	}
	av_free(f);	/* probe frame no longer needed from here on */

	if(!got_picture) {
		veejay_msg(VEEJAY_MSG_ERROR, "FFmpeg: Unable to get whole picture from %s", filename );
		avcodec_close( x->codec_ctx );
		avhelper_close_input_file( x->avformat_ctx );
		free(x->output);
		free(x);
		return NULL;
	}

	x->pixfmt = x->codec_ctx->pix_fmt;
	x->codec_id = x->codec_ctx->codec_id;
	x->frame = avcodec_alloc_frame();
	x->input = yuv_yuv_template( NULL,NULL,NULL, x->codec_ctx->width,x->codec_ctx->height, x->pixfmt );

	sws_template sws_tem;
	veejay_memset(&sws_tem, 0,sizeof(sws_template));
	sws_tem.flags = yuv_which_scaler();
	x->scaler = yuv_init_swscaler( x->input,x->output, &sws_tem, yuv_sws_get_cpu_flags());
	
	if( x->scaler == NULL ) {
		veejay_msg(VEEJAY_MSG_ERROR,"FFmpeg: Failed to get scaler context for %dx%d in %d to %dx%d in %d",
				x->codec_ctx->width,x->codec_ctx->height, x->pixfmt,
				wid,hei,dst_pixfmt);
		/* BUGFIX: 'f' was already released by av_free(f) above; freeing
		   it again here was a double free. Release the per-context
		   frame allocated for decoding instead, which used to leak. */
		av_free(x->frame);
		avcodec_close( x->codec_ctx );
		avhelper_close_input_file( x->avformat_ctx );
		free(x->output);
		free(x->input);
		free(x);
		return NULL;
	}
	
	return (void*) x;
}