int contourextract_malloc(void **d, int width, int height)
{
    contourextract_data *my;
    *d = (void*) vj_calloc(sizeof(contourextract_data));
    if( *d == NULL )
        return 0;
    my = (contourextract_data*) *d;

    dw_ = nearest_div( width / 8 );
    dh_ = nearest_div( height / 8 );

    my->current = (uint8_t*) vj_calloc( ru8( sizeof(uint8_t) * dw_ * dh_ * 3 ) );
    my->bitmap  = (uint8_t*) vj_calloc( ru8( sizeof(uint8_t) * width * height ) );

    if( static_bg == NULL )
        static_bg = (uint8_t*) vj_calloc( ru8( width + width * height * sizeof(uint8_t) ) );
    if( dt_map == NULL )
        dt_map = (uint32_t*) vj_calloc( ru8( width * height * sizeof(uint32_t) + width ) );

    veejay_memset( &template_, 0, sizeof(sws_template) );
    veejay_memset( proj_, 0, sizeof(proj_) );
    template_.flags = 1;

    /* shrink the full frame to a dw_ x dh_ greyscale image for contour tracing */
    vj_get_yuvgrey_template( &to_shrink_, width, height );
    vj_get_yuvgrey_template( &shrinked_, dw_, dh_ );

    shrink_ = yuv_init_swscaler( &(to_shrink_), &(shrinked_), &template_, yuv_sws_get_cpu_flags() );

    points = (point_t**) vj_calloc( sizeof(point_t*) * 12000 );
    int i;
    for( i = 0; i < 12000; i ++ ) {
        points[i] = (point_t*) vj_calloc( sizeof(point_t) );
    }

    veejay_memset( x_, 0, sizeof(x_) );
    veejay_memset( y_, 0, sizeof(y_) );

    return 1;
}
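/*
 * Sketch (assumption, not part of the original file): the shrink_ scaler
 * initialised above pairs the full-size greyscale template (to_shrink_) with
 * the dw_ x dh_ template (shrinked_). A process step would typically point the
 * templates at the live planes and let the scaler fill my->current. The helper
 * name and the assumption that VJFrame exposes its planes through data[] are
 * mine.
 */
static void contourextract_shrink_sketch( contourextract_data *my, uint8_t *luma_plane )
{
    to_shrink_.data[0] = luma_plane;    /* full resolution Y plane                */
    shrinked_.data[0]  = my->current;   /* dw_ x dh_ downscaled destination plane */
    yuv_convert_and_scale( shrink_, &to_shrink_, &shrinked_ );
}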
int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3] )
{
    threaded_t *t = (threaded_t*) tag->priv;
    int state = 0;

    /* frame ready ? */
    lock(t);
    state = t->state;
    if( state == 0 || t->bufsize == 0 || t->buf == NULL ) {
        unlock(t);
        return 1; // not active or no frame
    }
    // just continue when t->have_frame == 0

    //@ color space convert frame
    int b_len = t->in_w * t->in_h;
    int buvlen = b_len;

    if( PIX_FMT_YUV420P == t->in_fmt || PIX_FMT_YUVJ420P == t->in_fmt )
        buvlen = b_len / 4;
    else
        buvlen = b_len / 2;

    if( t->a == NULL )
        t->a = yuv_yuv_template( t->buf, t->buf + b_len, t->buf + b_len + buvlen, t->in_w, t->in_h, t->in_fmt );

    if( t->b == NULL )
        t->b = yuv_yuv_template( buffer[0], buffer[1], buffer[2], t->w, t->h, t->f );

    if( t->scaler == NULL ) {
        sws_template sws_templ;
        memset( &sws_templ, 0, sizeof(sws_template) );
        sws_templ.flags = yuv_which_scaler();
        t->scaler = yuv_init_swscaler( t->a, t->b, &sws_templ, yuv_sws_get_cpu_flags() );
    }

    yuv_convert_and_scale( t->scaler, t->a, t->b );

    t->have_frame = 0;

    unlock(t);

    return 1;
}
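/*
 * Sketch (assumption, not from the original file): a caller hands
 * net_thread_get_frame the destination planes of the tag's output frame; the
 * conversion and scaling to t->w x t->h in format t->f happens under the
 * thread lock. The function returns 1 both when no frame is pending and after
 * a successful convert, so the caller simply reuses the planes afterwards.
 */
static void pull_network_frame( vj_tag *tag, uint8_t *Y, uint8_t *U, uint8_t *V )
{
    uint8_t *planes[3] = { Y, U, V };
    net_thread_get_frame( tag, planes );
    /* planes now hold the most recently received frame, converted in place */
}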
vj_sdl *vj_sdl_allocate(int width, int height, int fmt, int use_key, int use_mouse, int show_cursor)
{
    vj_sdl *vjsdl = (vj_sdl *) vj_malloc(sizeof(vj_sdl));
    if (!vjsdl)
        return NULL;
    veejay_memset( vjsdl, 0, sizeof(vj_sdl) );

    vjsdl->flags[0] = 0;
    vjsdl->flags[1] = 0;
    vjsdl->use_keyboard = use_key;
    vjsdl->mouse_motion = use_mouse;
    vjsdl->show_cursor = show_cursor;
    vjsdl->pix_format = SDL_YUY2_OVERLAY;
    vjsdl->pix_fmt = fmt;
    vjsdl->width = width;
    vjsdl->height = height;
    vjsdl->frame_size = width * height;
    vjsdl->sw_scale_width = 0;
    vjsdl->sw_scale_height = 0;
    vjsdl->custom_geo[0] = -1;
    vjsdl->custom_geo[1] = -1;
    vjsdl->display = NULL;

    switch(fmt) {
        //@ don't use YUVJ here - when blitting to SDL it would be converted to clamped YUV (YUYV422)
        case FMT_422F: vjsdl->ffmpeg_pixfmt = PIX_FMT_YUV422P; break;
        case FMT_422:  vjsdl->ffmpeg_pixfmt = PIX_FMT_YUV422P; break;
    }

    sws_template templ;
    memset( &templ, 0, sizeof(sws_template) );
    templ.flags = yuv_which_scaler();

    VJFrame *src = yuv_yuv_template( NULL, NULL, NULL, vjsdl->width, vjsdl->height, vjsdl->ffmpeg_pixfmt );
    VJFrame *dst = yuv_yuv_template( NULL, NULL, NULL, vjsdl->width, vjsdl->height, PIX_FMT_YUYV422 );

    vjsdl->scaler = yuv_init_swscaler( src, dst, &templ, yuv_sws_get_cpu_flags() );

    vjsdl->src_frame = (void*) src;
    vjsdl->dst_frame = (void*) dst;

    return vjsdl;
}
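/*
 * Sketch (assumption): per displayed frame the planar 4:2:2 picture is pushed
 * through vjsdl->scaler into the packed YUYV422 buffer that SDL blits as an
 * overlay. The helper below and the use of VJFrame's data[] members are
 * illustrative only.
 */
static void vj_sdl_convert_sketch( vj_sdl *vjsdl, uint8_t *planes[3], uint8_t *overlay_pixels )
{
    VJFrame *src = (VJFrame*) vjsdl->src_frame;
    VJFrame *dst = (VJFrame*) vjsdl->dst_frame;
    src->data[0] = planes[0];
    src->data[1] = planes[1];
    src->data[2] = planes[2];
    dst->data[0] = overlay_pixels;   /* packed YUYV422, single plane */
    yuv_convert_and_scale( vjsdl->scaler, src, dst );
}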
void *avhelper_get_decoder( const char *filename, int dst_pixfmt, int dst_width, int dst_height )
{
    char errbuf[512];
    el_decoder_t *x = (el_decoder_t*) vj_calloc( sizeof( el_decoder_t ) );
    if(!x) {
        return NULL;
    }

#if LIBAVCODEC_BUILD > 5400
    int err = avformat_open_input( &(x->avformat_ctx), filename, NULL, NULL );
#else
    int err = av_open_input_file( &(x->avformat_ctx), filename, NULL, 0, NULL );
#endif
    if( err < 0 ) {
        av_strerror( err, errbuf, sizeof(errbuf) );
        veejay_msg(VEEJAY_MSG_DEBUG, "%s: %s", filename, errbuf );
        free(x);
        return NULL;
    }

#if LIBAVCODEC_BUILD > 5400
    /* avformat_find_stream_info leaks memory */
    err = avformat_find_stream_info( x->avformat_ctx, NULL );
#else
    err = av_find_stream_info( x->avformat_ctx );
#endif
    if( err < 0 ) {
        av_strerror( err, errbuf, sizeof(errbuf) );
        veejay_msg(VEEJAY_MSG_DEBUG, "%s: %s", filename, errbuf );
        avhelper_close_input_file( x->avformat_ctx );
        free(x);
        return NULL;
    }

    unsigned int i, j;
    unsigned int n = x->avformat_ctx->nb_streams;
    int vi = -1;

    /* find the first video stream whose codec is in _supported_codecs */
    for( i = 0; i < n; i ++ ) {
        if( !x->avformat_ctx->streams[i]->codec )
            continue;
        if( x->avformat_ctx->streams[i]->codec->codec_type > CODEC_ID_FIRST_SUBTITLE )
            continue;
        if( x->avformat_ctx->streams[i]->codec->codec_type < CODEC_ID_FIRST_AUDIO ) {
            int sup_codec = 0;
            for( j = 0; _supported_codecs[j].name != NULL; j ++ ) {
                if( x->avformat_ctx->streams[i]->codec->codec_id == _supported_codecs[j].id ) {
                    sup_codec = 1;
                    break;
                }
            }
            if( !sup_codec ) {
                avhelper_close_input_file( x->avformat_ctx );
                free(x);
                return NULL;
            }
            x->codec = avcodec_find_decoder( x->avformat_ctx->streams[i]->codec->codec_id );
            if( x->codec == NULL ) {
                avhelper_close_input_file( x->avformat_ctx );
                free(x);
                return NULL;
            }
            vi = i;
            veejay_msg(VEEJAY_MSG_DEBUG, "FFmpeg: video stream %d, codec_id %d", vi,
                       x->avformat_ctx->streams[i]->codec->codec_id );
            break;
        }
    }

    if( vi == -1 ) {
        veejay_msg(VEEJAY_MSG_DEBUG, "FFmpeg: No video streams found");
        avhelper_close_input_file( x->avformat_ctx );
        free(x);
        return NULL;
    }

    x->codec_ctx = x->avformat_ctx->streams[vi]->codec;

    int wid = dst_width;
    int hei = dst_height;
    if( wid == -1 && hei == -1 ) {
        wid = x->codec_ctx->width;
        hei = x->codec_ctx->height;
    }

#if LIBAVCODEC_BUILD > 5400
    if ( avcodec_open2( x->codec_ctx, x->codec, NULL ) < 0 )
#else
    if ( avcodec_open( x->codec_ctx, x->codec ) < 0 )
#endif
    {
        avhelper_close_input_file( x->avformat_ctx );
        free(x);
        return NULL;
    }

    veejay_memset( &(x->pkt), 0, sizeof(AVPacket) );
    AVFrame *f = avcodec_alloc_frame();
    x->output = yuv_yuv_template( NULL, NULL, NULL, wid, hei, dst_pixfmt );

    /* probe: decode packets until a complete picture is available */
    int got_picture = 0;
    while(1) {
        int ret = av_read_frame( x->avformat_ctx, &(x->pkt) );
        if( ret < 0 )
            break;
        if( x->pkt.stream_index == vi ) {
            avcodec_decode_video( x->codec_ctx, f, &got_picture, x->pkt.data, x->pkt.size );
            avhelper_frame_unref( f );
        }
        av_free_packet( &(x->pkt) );
        if( got_picture )
            break;
    }
    av_free(f);

    if( !got_picture ) {
        veejay_msg(VEEJAY_MSG_ERROR, "FFmpeg: Unable to get whole picture from %s", filename );
        avcodec_close( x->codec_ctx );
        avhelper_close_input_file( x->avformat_ctx );
        free(x->output);
        free(x);
        return NULL;
    }

    x->pixfmt = x->codec_ctx->pix_fmt;
    x->codec_id = x->codec_ctx->codec_id;
    x->frame = avcodec_alloc_frame();
    x->input = yuv_yuv_template( NULL, NULL, NULL, x->codec_ctx->width, x->codec_ctx->height, x->pixfmt );

    sws_template sws_tem;
    veejay_memset( &sws_tem, 0, sizeof(sws_template) );
    sws_tem.flags = yuv_which_scaler();
    x->scaler = yuv_init_swscaler( x->input, x->output, &sws_tem, yuv_sws_get_cpu_flags() );

    if( x->scaler == NULL ) {
        veejay_msg(VEEJAY_MSG_ERROR, "FFmpeg: Failed to get scaler context for %dx%d in %d to %dx%d in %d",
                   x->codec_ctx->width, x->codec_ctx->height, x->pixfmt, wid, hei, dst_pixfmt );
        /* f was already released after the probe loop above */
        av_free( x->frame );
        avcodec_close( x->codec_ctx );
        avhelper_close_input_file( x->avformat_ctx );
        free(x->output);
        free(x->input);
        free(x);
        return NULL;
    }

    return (void*) x;
}
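/*
 * Sketch (assumption): open a clip at its native size (-1,-1 keeps the source
 * dimensions, per the wid/hei logic above) and request planar 4:2:2 output.
 * The returned handle is opaque; the companion calls for fetching frames and
 * closing the decoder live elsewhere in avhelper and are not shown here.
 */
static void *avhelper_open_sketch( void )
{
    void *decoder = avhelper_get_decoder( "clip.avi", PIX_FMT_YUV422P, -1, -1 );
    if( decoder == NULL )
        veejay_msg( VEEJAY_MSG_ERROR, "FFmpeg: cannot open clip.avi" );
    return decoder;
}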