/* Open (or prepare) the MP4 output target and allocate the muxer handle.
 *
 * psz_filename: output path, or "-" for stdout.
 * p_handle:     receives the allocated mp4_hnd_t on success (NULL on entry).
 * opt:          CLI output options (only use_dts_compress is consumed here).
 *
 * Returns 0 on success; the MP4_FAIL_IF_ERR* macros handle the error paths.
 */
static int open_file( char *psz_filename, hnd_t *p_handle, cli_output_opt_t *opt )
{
    mp4_hnd_t *p_mp4;

    *p_handle = NULL;

    /* Non-regular outputs (stdout, pipes) can't be seeked to rewrite headers,
     * so they force fragmented MP4 below. */
    int b_regular = strcmp( psz_filename, "-" );
    b_regular = b_regular && x264_is_regular_file_path( psz_filename );
    if( b_regular )
    {
        FILE *fh = x264_fopen( psz_filename, "wb" );
        MP4_FAIL_IF_ERR( !fh, "cannot open output file `%s'.\n", psz_filename );
        b_regular = x264_is_regular_file( fh );
        fclose( fh );
    }

    /* calloc zero-initializes in one step, replacing the malloc+memset pair. */
    p_mp4 = calloc( 1, sizeof(mp4_hnd_t) );
    MP4_FAIL_IF_ERR( !p_mp4, "failed to allocate memory for muxer information.\n" );

    p_mp4->b_dts_compress = opt->use_dts_compress;
    p_mp4->b_use_recovery = 0;
    p_mp4->b_no_pasp = 0;
    p_mp4->scale_method = ISOM_SCALE_METHOD_MEET;
    p_mp4->b_fragments = !b_regular;
    p_mp4->b_stdout = !strcmp( psz_filename, "-" );

    /* Select the major brand from the filename extension.
     * NOTE(review): the unit tests for get_filename_extension() show it can
     * return NULL when there is no extension, which made the original
     * strcmp() calls undefined behavior; guard against NULL so extension-less
     * names fall through to the MP42 default. */
    char *ext = get_filename_extension( psz_filename );
    if( ext && (!strcmp( ext, "mov" ) || !strcmp( ext, "qt" )) )
    {
        p_mp4->major_brand = ISOM_BRAND_TYPE_QT;
        p_mp4->b_brand_qt = 1;
    }
    else if( ext && !strcmp( ext, "3gp" ) )
    {
        p_mp4->major_brand = ISOM_BRAND_TYPE_3GP6;
        p_mp4->i_brand_3gpp = 1;
    }
    else if( ext && !strcmp( ext, "3g2" ) )
    {
        p_mp4->major_brand = ISOM_BRAND_TYPE_3G2A;
        p_mp4->i_brand_3gpp = 2;
    }
    else
        p_mp4->major_brand = ISOM_BRAND_TYPE_MP42;

    p_mp4->p_root = lsmash_open_movie( psz_filename, p_mp4->b_fragments ? LSMASH_FILE_MODE_WRITE_FRAGMENTED : LSMASH_FILE_MODE_WRITE );
    MP4_FAIL_IF_ERR_EX( !p_mp4->p_root, "failed to create root.\n" );

    p_mp4->summary = (lsmash_video_summary_t *)lsmash_create_summary( LSMASH_SUMMARY_TYPE_VIDEO );
    MP4_FAIL_IF_ERR_EX( !p_mp4->summary, "failed to allocate memory for summary information of video.\n" );
    p_mp4->summary->sample_type = ISOM_CODEC_TYPE_AVC1_VIDEO;

    *p_handle = p_mp4;
    return 0;
}
/* Criterion tests for get_filename_extension():
 * the extension is the text after the final '.', and NULL is expected
 * when no extension exists (no dot, trailing dot, empty, or NULL input). */
Test(test_pathutils, test_get_filename_extension)
{
    /* names that carry an extension — the last dot wins */
    cr_assert_str_eq(get_filename_extension("test.foo"), "foo",
                     "wrong file name extension returned");
    cr_assert_str_eq(get_filename_extension("/test/test.foo.bar"), "bar",
                     "wrong file name extension returned");
    cr_assert_str_eq(get_filename_extension("/test/.test/test.foo.bar"), "bar",
                     "wrong file name extension returned");

    /* names without a usable extension must yield NULL */
    cr_assert_null(get_filename_extension("/test"),
                   "wrong file name extension returned");
    cr_assert_null(get_filename_extension("test."),
                   "wrong file name extension returned");
    cr_assert_null(get_filename_extension(""),
                   "wrong file name extension returned");

    /* NULL input is tolerated and maps to NULL */
    cr_assert_null(get_filename_extension(NULL),
                   "wrong file name extension returned");
}
int main (int argc, char *argv[]) { char * in_name, * in_ext, * in_filename, * out_filename; if (argc != 2) { printf("Usage %s <grammar_filename>\n", argv[0]); return 1; } in_filename = argv[1]; infile = fopen(in_filename, "r"); if (infile == NULL) { fprintf(stderr, "ERROR: Could not open file \"%s\"\n", in_filename); return 1; } in_ext = get_filename_extension(in_filename); in_name = get_filename(in_filename); out_filename = store_to_file(in_name, in_ext); printf("###\tRecursive Generator\t###\n"); outfile = fopen(out_filename, "w"); if (outfile == NULL) { fprintf(stderr, "ERROR:Could not open file \"%s\"\n", outfile); fclose(infile); return 1; } printf("Parsing file %s...\n", in_filename); parse_file(infile); //print_grammar(); printf("Output filename: %s\n", out_filename); printf("Generating sentence... \t"); generate_sentence(START_ELEMENT); printf("Done!\n"); fprintf(outfile, "\n"); fclose(infile); fclose(outfile); printf("Quiting...\n"); destroy_grammar(); return 0; }
int main(int argc, char **argv) { Atom expr; Atom result; Atom env; Error err; char input[MAXLINE]; char *end = input; env_init(&env); //print_expr(env); FILE *fp = NULL; /* no arguments were passed */ if(argc < 2) fp = stdin; else { fp = fopen(argv[1], "r"); if(fp == NULL) { fprintf(stderr, "Cannot open file %s\n", argv[1]); return -1; } const char * file_extension = get_filename_extension(argv[1]); if(strcmp(file_extension, "liuyong") != 0) { fprintf(stderr, "file extension must be .liuyong\n"); return -1; } } printf("> "); while(fgets(input, MAXLINE, fp) != NULL) { err = read_expr(input, &end, &expr); //print_expr(expr); if(!err) err = eval_expr(expr, env, &result); switch(err) { case ERROR_OK: print_expr(result); printf("\n"); break; case ERROR_SYNTAX: printf("Syntax Error\n"); break; case ERROR_UNBOUND: printf("Symbol Not Bound\n"); break; case ERROR_TYPE: printf("Wrong Type\n"); break; case ERROR_ARGS: printf("Wrong Nomber Of Arguments\n"); break; } printf("> "); } }
/* Open an input file through libavformat and prefetch the first frame.
 *
 * psz_filename: input path, or "-" for stdin (mapped to "pipe:").
 * p_handle:     receives the allocated lavf_hnd_t on success.
 * info:         filled with width/height/fps/timebase/SAR of the stream.
 * opt:          CLI input options (unused directly here).
 *
 * Returns 0 on success, -1 on failure.
 * BUG FIX: the handle (and, where applicable, the opened lavf context) was
 * leaked on every error path; each path now releases what it acquired.
 */
static int open_file( char *psz_filename, hnd_t *p_handle, video_info_t *info, cli_input_opt_t *opt )
{
    lavf_hnd_t *h = malloc( sizeof(lavf_hnd_t) );
    if( !h )
        return -1;
    av_register_all();
    h->scaler = NULL;
    if( !strcmp( psz_filename, "-" ) )
        psz_filename = "pipe:";

    if( av_open_input_file( &h->lavf, psz_filename, NULL, 0, NULL ) )
    {
        fprintf( stderr, "lavf [error]: could not open input file\n" );
        free( h );
        return -1;
    }

    if( av_find_stream_info( h->lavf ) < 0 )
    {
        fprintf( stderr, "lavf [error]: could not find input stream info\n" );
        av_close_input_file( h->lavf );
        free( h );
        return -1;
    }

    /* locate the first video stream */
    int i = 0;
    while( i < h->lavf->nb_streams && h->lavf->streams[i]->codec->codec_type != CODEC_TYPE_VIDEO )
        i++;
    if( i == h->lavf->nb_streams )
    {
        fprintf( stderr, "lavf [error]: could not find video stream\n" );
        av_close_input_file( h->lavf );
        free( h );
        return -1;
    }

    h->stream_id       = i;
    h->next_frame      = 0;
    h->pts_offset_flag = 0;
    h->pts_offset      = 0;
    AVCodecContext *c  = h->lavf->streams[i]->codec;
    h->init_width   = h->cur_width  = info->width  = c->width;
    h->init_height  = h->cur_height = info->height = c->height;
    h->cur_pix_fmt  = c->pix_fmt;
    info->fps_num      = h->lavf->streams[i]->r_frame_rate.num;
    info->fps_den      = h->lavf->streams[i]->r_frame_rate.den;
    info->timebase_num = h->lavf->streams[i]->time_base.num;
    info->timebase_den = h->lavf->streams[i]->time_base.den;
    h->vfr_input    = info->vfr;
    h->vertical_flip = 0;

    /* avisynth stores rgb data vertically flipped.
     * BUG FIX: also guard against a NULL extension (no '.' in the name —
     * e.g. "pipe:") before strcasecmp, which was undefined behavior. */
    char *ext = get_filename_extension( psz_filename );
    if( ext && !strcasecmp( ext, "avs" ) &&
        (h->cur_pix_fmt == PIX_FMT_BGRA || h->cur_pix_fmt == PIX_FMT_BGR24) )
        info->csp |= X264_CSP_VFLIP;

    if( h->cur_pix_fmt != PIX_FMT_YUV420P )
        fprintf( stderr, "lavf [warning]: converting from %s to YV12\n",
                 avcodec_get_pix_fmt_name( h->cur_pix_fmt ) );

    if( avcodec_open( c, avcodec_find_decoder( c->codec_id ) ) )
    {
        fprintf( stderr, "lavf [error]: could not find decoder for video stream\n" );
        av_close_input_file( h->lavf );
        free( h );
        return -1;
    }

    /* prefetch the first frame and set/confirm flags */
    h->first_pic = malloc( sizeof(x264_picture_t) );
    if( !h->first_pic || lavf_input.picture_alloc( h->first_pic, info->csp, info->width, info->height ) )
    {
        fprintf( stderr, "lavf [error]: malloc failed\n" );
        free( h->first_pic ); /* free(NULL) is a no-op */
        av_close_input_file( h->lavf );
        free( h );
        return -1;
    }
    else if( read_frame_internal( h->first_pic, h, 0, info ) )
        return -1; /* TODO(review): full cleanup here needs picture_clean */

    info->sar_height = c->sample_aspect_ratio.den;
    info->sar_width  = c->sample_aspect_ratio.num;
    *p_handle = h;
    return 0;
}
/* Exercises get_filename_extension(): the extension is everything after the
 * final '.' (including trailing '~'), and NULL is expected when there is no
 * usable extension (bare name, dots only, trailing dot, empty or NULL input). */
static void test_get_filename_extension(void)
{
    /* inputs with a real extension */
    assert_string(get_filename_extension("test.csv"), "csv",
                  "test.csv, expected extension: csv");
    assert_string(get_filename_extension(".test.csv"), "csv",
                  ".test.csv, expected extension: csv");
    assert_string(get_filename_extension("test.csv.orig"), "orig",
                  "test.csv.orig, expected extension: orig");
    assert_string(get_filename_extension("test.csv~"), "csv~",
                  "test.csv~, expected extension:csv~");
    assert_string(get_filename_extension("1.x"), "x",
                  "x, expected extension is x");

    /* inputs without a usable extension must map to NULL */
    assert_true(get_filename_extension("filename") == NULL,
                "filename, expected extension is NULL");
    assert_true(get_filename_extension("") == NULL,
                "input is empty, expected extension is NULL");
    assert_true(get_filename_extension(".config") == NULL,
                ".config, expected extension is NULL");
    assert_true(get_filename_extension(".") == NULL,
                ". , expected extension is NULL");
    assert_true(get_filename_extension("...") == NULL,
                "..., expected extension is NULL");
    assert_true(get_filename_extension("1.") == NULL,
                "1., expected extension is NULL");

    /* NULL input is tolerated */
    assert_true(get_filename_extension(NULL) == NULL,
                "input is NULL, expected extension is NULL");
}
/* Open an input clip through the Avisynth C API.
 *
 * For *.avs scripts the script is Imported directly (with multi-threading
 * Distributor support); for other files a sequence of known source filters
 * is tried until one succeeds. Field-based clips are woven into frames, and
 * when swscale is unavailable the clip is converted to the requested CSP
 * and range inside Avisynth itself. Fills *info with the clip's geometry,
 * frame rate, frame count and colorspace, and stores the handle in
 * *p_handle. Returns 0 on success; the FAIL_IF_ERROR macro handles errors.
 */
static int open_file( char *psz_filename, hnd_t *p_handle, video_info_t *info, cli_input_opt_t *opt )
{
    /* Avisynth cannot read from pipes, so require a regular file. */
    FILE *fh = x264_fopen( psz_filename, "r" );
    if( !fh )
        return -1;
    int b_regular = x264_is_regular_file( fh );
    fclose( fh );
    FAIL_IF_ERROR( !b_regular, "AVS input is incompatible with non-regular file `%s'\n", psz_filename );

    avs_hnd_t *h = calloc( 1, sizeof(avs_hnd_t) );
    if( !h )
        return -1;
    FAIL_IF_ERROR( custom_avs_load_library( h ), "failed to load avisynth\n" );
    h->env = h->func.avs_create_script_environment( AVS_INTERFACE_25 );
    /* avs_get_error only exists in newer Avisynth builds; probe before use. */
    if( h->func.avs_get_error )
    {
        const char *error = h->func.avs_get_error( h->env );
        FAIL_IF_ERROR( error, "%s\n", error );
    }
    float avs_version = get_avs_version( h );
    if( avs_version <= 0 )
        return -1;
    x264_cli_log( "avs", X264_LOG_DEBUG, "using avisynth version %.2f\n", avs_version );

#ifdef _WIN32
    /* Avisynth doesn't support Unicode filenames. */
    char ansi_filename[MAX_PATH];
    FAIL_IF_ERROR( !x264_ansi_filename( psz_filename, ansi_filename, MAX_PATH, 0 ), "invalid ansi filename\n" );
    AVS_Value arg = avs_new_value_string( ansi_filename );
#else
    AVS_Value arg = avs_new_value_string( psz_filename );
#endif

    AVS_Value res;
    char *filename_ext = get_filename_extension( psz_filename );

    if( !strcasecmp( filename_ext, "avs" ) )
    {
        /* A script file: let Avisynth evaluate it. */
        res = h->func.avs_invoke( h->env, "Import", arg, NULL );
        FAIL_IF_ERROR( avs_is_error( res ), "%s\n", avs_as_error( res ) );
        /* check if the user is using a multi-threaded script and apply distributor if necessary.
           adapted from avisynth's vfw interface */
        AVS_Value mt_test = h->func.avs_invoke( h->env, "GetMTMode", avs_new_value_bool( 0 ), NULL );
        int mt_mode = avs_is_int( mt_test ) ? avs_as_int( mt_test ) : 0;
        h->func.avs_release_value( mt_test );
        if( mt_mode > 0 && mt_mode < 5 )
        {
            AVS_Value temp = h->func.avs_invoke( h->env, "Distributor", res, NULL );
            h->func.avs_release_value( res );
            res = temp;
        }
    }
    else /* non script file */
    {
        /* cycle through known source filters to find one that works */
        const char *filter[AVS_MAX_SEQUENCE+1] = { 0 };
        avs_build_filter_sequence( filename_ext, filter );
        int i;
        for( i = 0; filter[i]; i++ )
        {
            x264_cli_log( "avs", X264_LOG_INFO, "trying %s... ", filter[i] );
            if( !h->func.avs_function_exists( h->env, filter[i] ) )
            {
                x264_cli_printf( X264_LOG_INFO, "not found\n" );
                continue;
            }
            /* FFMS indexing can take a while on large files; warn the user. */
            if( !strncasecmp( filter[i], "FFmpegSource", 12 ) )
            {
                x264_cli_printf( X264_LOG_INFO, "indexing... " );
                fflush( stderr );
            }
            res = h->func.avs_invoke( h->env, filter[i], arg, NULL );
            if( !avs_is_error( res ) )
            {
                x264_cli_printf( X264_LOG_INFO, "succeeded\n" );
                break;
            }
            x264_cli_printf( X264_LOG_INFO, "failed\n" );
        }
        FAIL_IF_ERROR( !filter[i], "unable to find source filter to open `%s'\n", psz_filename );
    }

    FAIL_IF_ERROR( !avs_is_clip( res ), "`%s' didn't return a video clip\n", psz_filename );
    h->clip = h->func.avs_take_clip( res, h->env );
    const AVS_VideoInfo *vi = h->func.avs_get_video_info( h->clip );
    FAIL_IF_ERROR( !avs_has_video( vi ), "`%s' has no video data\n", psz_filename );

    /* if the clip is made of fields instead of frames, call weave to make them frames */
    if( avs_is_field_based( vi ) )
    {
        x264_cli_log( "avs", X264_LOG_WARNING, "detected fieldbased (separated) input, weaving to frames\n" );
        AVS_Value tmp = h->func.avs_invoke( h->env, "Weave", res, NULL );
        FAIL_IF_ERROR( avs_is_error( tmp ), "couldn't weave fields into frames: %s\n", avs_as_error( tmp ) );
        res = update_clip( h, &vi, tmp, res );
        info->interlaced = 1;
        info->tff = avs_is_tff( vi );
    }

#if !HAVE_SWSCALE
    /* if swscale is not available, convert the CSP if necessary */
    FAIL_IF_ERROR( avs_version < 2.6f && (opt->output_csp == X264_CSP_I400 || opt->output_csp == X264_CSP_I422 || opt->output_csp == X264_CSP_I444),
                   "avisynth >= 2.6 is required for i400/i422/i444 output\n" );
    if( (opt->output_csp == X264_CSP_I400 && !AVS_IS_Y( vi )) ||
        (opt->output_csp == X264_CSP_I420 && !AVS_IS_420( vi )) ||
        (opt->output_csp == X264_CSP_I422 && !AVS_IS_422( vi )) ||
        (opt->output_csp == X264_CSP_I444 && !AVS_IS_444( vi )) ||
        (opt->output_csp == X264_CSP_RGB && !avs_is_rgb( vi )) )
    {
        /* Avisynth+ and classic Avisynth name their conversion targets
         * differently; pick the right "ConvertTo*" suffix. */
        const char *csp;
        if( AVS_IS_AVISYNTHPLUS )
        {
            csp = opt->output_csp == X264_CSP_I400 ? "Y" :
                  opt->output_csp == X264_CSP_I420 ? "YUV420" :
                  opt->output_csp == X264_CSP_I422 ? "YUV422" :
                  opt->output_csp == X264_CSP_I444 ? "YUV444" :
                  "RGB";
        }
        else
        {
            csp = opt->output_csp == X264_CSP_I400 ? "Y8" :
                  opt->output_csp == X264_CSP_I420 ? "YV12" :
                  opt->output_csp == X264_CSP_I422 ? "YV16" :
                  opt->output_csp == X264_CSP_I444 ? "YV24" :
                  "RGB";
        }
        x264_cli_log( "avs", X264_LOG_WARNING, "converting input clip to %s\n", csp );
        /* Subsampled formats need even (and, for interlaced 4:2:0, mod-4)
         * dimensions for the conversion to be valid. */
        if( opt->output_csp != X264_CSP_I400 )
        {
            FAIL_IF_ERROR( opt->output_csp < X264_CSP_I444 && (vi->width&1),
                           "input clip width not divisible by 2 (%dx%d)\n", vi->width, vi->height );
            FAIL_IF_ERROR( opt->output_csp == X264_CSP_I420 && info->interlaced && (vi->height&3),
                           "input clip height not divisible by 4 (%dx%d)\n", vi->width, vi->height );
            FAIL_IF_ERROR( (opt->output_csp == X264_CSP_I420 || info->interlaced) && (vi->height&1),
                           "input clip height not divisible by 2 (%dx%d)\n", vi->width, vi->height );
        }
        char conv_func[16];
        snprintf( conv_func, sizeof(conv_func), "ConvertTo%s", csp );
        AVS_Value arg_arr[3];
        const char *arg_name[3];
        int arg_count = 1;
        arg_arr[0] = res;
        arg_name[0] = NULL;
        if( opt->output_csp != X264_CSP_I400 )
        {
            arg_arr[arg_count] = avs_new_value_bool( info->interlaced );
            arg_name[arg_count] = "interlaced";
            arg_count++;
        }
        /* if doing a rgb <-> yuv conversion then range is handled via 'matrix'. though it's only supported in 2.56+ */
        char matrix[7];
        if( avs_version >= 2.56f && ((opt->output_csp == X264_CSP_RGB && avs_is_yuv( vi )) || (opt->output_csp != X264_CSP_RGB && avs_is_rgb( vi ))) )
        {
            // if converting from yuv, then we specify the matrix for the input, otherwise use the output's.
            int use_pc_matrix = avs_is_yuv( vi ) ? opt->input_range == RANGE_PC : opt->output_range == RANGE_PC;
            snprintf( matrix, sizeof(matrix), "%s601", use_pc_matrix ? "PC." : "Rec" ); /* FIXME: use correct coefficients */
            arg_arr[arg_count] = avs_new_value_string( matrix );
            arg_name[arg_count] = "matrix";
            arg_count++;
            // notification that the input range has changed to the desired one
            opt->input_range = opt->output_range;
        }
        AVS_Value res2 = h->func.avs_invoke( h->env, conv_func, avs_new_value_array( arg_arr, arg_count ), arg_name );
        FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert input clip to %s: %s\n", csp, avs_as_error( res2 ) );
        res = update_clip( h, &vi, res2, res );
    }
    /* if swscale is not available, change the range if necessary. This only applies to YUV-based CSPs however */
    if( avs_is_yuv( vi ) && opt->output_range != RANGE_AUTO && ((opt->input_range == RANGE_PC) != opt->output_range) )
    {
        const char *levels = opt->output_range ? "TV->PC" : "PC->TV";
        x264_cli_log( "avs", X264_LOG_WARNING, "performing %s conversion\n", levels );
        AVS_Value arg_arr[2];
        arg_arr[0] = res;
        arg_arr[1] = avs_new_value_string( levels );
        const char *arg_name[] = { NULL, "levels" };
        AVS_Value res2 = h->func.avs_invoke( h->env, "ColorYUV", avs_new_value_array( arg_arr, 2 ), arg_name );
        FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert range: %s\n", avs_as_error( res2 ) );
        res = update_clip( h, &vi, res2, res );
        // notification that the input range has changed to the desired one
        opt->input_range = opt->output_range;
    }
#endif

    h->func.avs_release_value( res );

    /* Export the clip's properties to the caller. */
    info->width   = vi->width;
    info->height  = vi->height;
    info->fps_num = vi->fps_numerator;
    info->fps_den = vi->fps_denominator;
    h->num_frames = info->num_frames = vi->num_frames;
    info->thread_safe = 1;
    /* Map the Avisynth pixel format onto an x264 CSP; RGB formats are
     * stored bottom-up by Avisynth, hence the VFLIP flag. */
    if( AVS_IS_RGB64( vi ) )
        info->csp = X264_CSP_BGRA | X264_CSP_VFLIP | X264_CSP_HIGH_DEPTH;
    else if( avs_is_rgb32( vi ) )
        info->csp = X264_CSP_BGRA | X264_CSP_VFLIP;
    else if( AVS_IS_RGB48( vi ) )
        info->csp = X264_CSP_BGR | X264_CSP_VFLIP | X264_CSP_HIGH_DEPTH;
    else if( avs_is_rgb24( vi ) )
        info->csp = X264_CSP_BGR | X264_CSP_VFLIP;
    else if( AVS_IS_YUV444P16( vi ) )
        info->csp = X264_CSP_I444 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv24( vi ) )
        info->csp = X264_CSP_I444;
    else if( AVS_IS_YUV422P16( vi ) )
        info->csp = X264_CSP_I422 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv16( vi ) )
        info->csp = X264_CSP_I422;
    else if( AVS_IS_YUV420P16( vi ) )
        info->csp = X264_CSP_I420 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv12( vi ) )
        info->csp = X264_CSP_I420;
    else if( AVS_IS_Y16( vi ) )
        info->csp = X264_CSP_I400 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_y8( vi ) )
        info->csp = X264_CSP_I400;
    else if( avs_is_yuy2( vi ) )
        info->csp = X264_CSP_YUYV;
#if HAVE_SWSCALE
    else if( avs_is_yv411( vi ) )
        info->csp = AV_PIX_FMT_YUV411P | X264_CSP_OTHER;
#endif
    else
    {
        /* Unknown format: report its name for diagnostics and fail. */
        AVS_Value pixel_type = h->func.avs_invoke( h->env, "PixelType", res, NULL );
        const char *pixel_type_name = avs_is_string( pixel_type ) ? avs_as_string( pixel_type ) : "unknown";
        FAIL_IF_ERROR( 1, "not supported pixel type: %s\n", pixel_type_name );
    }
    info->vfr = 0;

    *p_handle = h;
    return 0;
}
static int open_file( char *psz_filename, hnd_t *p_handle, video_info_t *info, cli_input_opt_t *opt ) { FILE *fh = fopen( psz_filename, "r" ); if( !fh ) return -1; FAIL_IF_ERROR( !x264_is_regular_file( fh ), "AVS input is incompatible with non-regular file `%s'\n", psz_filename ); fclose( fh ); avs_hnd_t *h = malloc( sizeof(avs_hnd_t) ); if( !h ) return -1; FAIL_IF_ERROR( x264_avs_load_library( h ), "failed to load avisynth\n" ) h->env = h->func.avs_create_script_environment( AVS_INTERFACE_25 ); FAIL_IF_ERROR( !h->env, "failed to initiate avisynth\n" ) AVS_Value arg = avs_new_value_string( psz_filename ); AVS_Value res; char *filename_ext = get_filename_extension( psz_filename ); if( !strcasecmp( filename_ext, "avs" ) ) { res = h->func.avs_invoke( h->env, "Import", arg, NULL ); FAIL_IF_ERROR( avs_is_error( res ), "%s\n", avs_as_string( res ) ) /* check if the user is using a multi-threaded script and apply distributor if necessary. adapted from avisynth's vfw interface */ AVS_Value mt_test = h->func.avs_invoke( h->env, "GetMTMode", avs_new_value_bool( 0 ), NULL ); int mt_mode = avs_is_int( mt_test ) ? avs_as_int( mt_test ) : 0; h->func.avs_release_value( mt_test ); if( mt_mode > 0 && mt_mode < 5 ) { AVS_Value temp = h->func.avs_invoke( h->env, "Distributor", res, NULL ); h->func.avs_release_value( res ); res = temp; } } else /* non script file */ { /* cycle through known source filters to find one that works */ const char *filter[AVS_MAX_SEQUENCE+1] = { 0 }; avs_build_filter_sequence( filename_ext, filter ); int i; for( i = 0; filter[i]; i++ ) { x264_cli_log( "avs", X264_LOG_INFO, "trying %s... ", filter[i] ); if( !h->func.avs_function_exists( h->env, filter[i] ) ) { x264_cli_printf( X264_LOG_INFO, "not found\n" ); continue; } if( !strncasecmp( filter[i], "FFmpegSource", 12 ) ) { x264_cli_printf( X264_LOG_INFO, "indexing... 
" ); fflush( stderr ); } res = h->func.avs_invoke( h->env, filter[i], arg, NULL ); if( !avs_is_error( res ) ) { x264_cli_printf( X264_LOG_INFO, "succeeded\n" ); break; } x264_cli_printf( X264_LOG_INFO, "failed\n" ); } FAIL_IF_ERROR( !filter[i], "unable to find source filter to open `%s'\n", psz_filename ) } FAIL_IF_ERROR( !avs_is_clip( res ), "`%s' didn't return a video clip\n", psz_filename ) h->clip = h->func.avs_take_clip( res, h->env ); const AVS_VideoInfo *vi = h->func.avs_get_video_info( h->clip ); FAIL_IF_ERROR( !avs_has_video( vi ), "`%s' has no video data\n", psz_filename ) /* if the clip is made of fields instead of frames, call weave to make them frames */ if( avs_is_field_based( vi ) ) { x264_cli_log( "avs", X264_LOG_WARNING, "detected fieldbased (separated) input, weaving to frames\n" ); AVS_Value tmp = h->func.avs_invoke( h->env, "Weave", res, NULL ); FAIL_IF_ERROR( avs_is_error( tmp ), "couldn't weave fields into frames\n" ) res = update_clip( h, &vi, tmp, res ); info->interlaced = 1; info->tff = avs_is_tff( vi ); } #if !HAVE_SWSCALE /* if swscale is not available, convert CSPs to yv12 */ if( !avs_is_yv12( vi ) ) { x264_cli_log( "avs", X264_LOG_WARNING, "converting input clip to YV12\n" ); FAIL_IF_ERROR( vi->width&1 || vi->height&1, "input clip width or height not divisible by 2 (%dx%d)\n", vi->width, vi->height ) const char *arg_name[2] = { NULL, "interlaced" }; AVS_Value arg_arr[2] = { res, avs_new_value_bool( info->interlaced ) }; AVS_Value res2 = h->func.avs_invoke( h->env, "ConvertToYV12", avs_new_value_array( arg_arr, 2 ), arg_name ); FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert input clip to YV12\n" ) res = update_clip( h, &vi, res2, res ); }