Example #1
struct AVS_Value* inline_avs_new_value_array(AVS_Value * ptr, AVS_Value * v0, int size)
{
    /* Store an array value built from v0[0..size-1] through the caller-provided
       pointer; NULL-safe, returns ptr unchanged. */
    if (ptr)
        *ptr = avs_new_value_array(v0, size);
    return ptr;
}
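A minimal sketch of how such a wrapper might be called; the file name, variable names, and the surrounding function are illustrative assumptions rather than code from the project above, and rely only on the standard avisynth_c.h inline constructors.

#include "avisynth_c.h"

/* Hypothetical caller: pack two values into a single AVS_Value array via the
   wrapper above, e.g. before handing them to avs_invoke(). */
static AVS_Value pack_example_args(void)
{
    AVS_Value elems[2];
    elems[0] = avs_new_value_string("input.avs"); /* assumed file name */
    elems[1] = avs_new_value_bool(1);
    AVS_Value packed;
    inline_avs_new_value_array(&packed, elems, 2);
    return packed; /* usable as the arguments parameter of avs_invoke() */
}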
Example #2
static int get_avisynth_version(avs_hnd_t *ah)
{
    if (!ah->func.avs_function_exists(ah->env, "VersionNumber"))
        return 0;

    AVS_Value ver = ah->func.avs_invoke(ah->env, "VersionNumber", avs_new_value_array(NULL, 0), NULL);
    if (avs_is_error(ver) || !avs_is_float(ver))
        return 0;
    int version = (int)(avs_as_float(ver) * 100 + 0.5);
    ah->func.avs_release_value(ver);
    return version;
}
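Because the helper above scales VersionNumber by 100 (2.60 becomes 260), callers can gate version-dependent behaviour with a plain integer comparison. A minimal sketch under that assumption, reusing the ah handle from the example:

/* Sketch: true when the host is AviSynth 2.60 or newer, the release that
   introduced the planar YV16/YV24/Y8 colorspaces. */
static int has_avs26_colorspaces(avs_hnd_t *ah)
{
    return get_avisynth_version(ah) >= 260;
}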
Example #3
static float get_avs_version( avs_hnd_t *h )
{
/* AvxSynth has its version defined starting at 4.0, even though it's based on
   AviSynth 2.5.8. This is troublesome for get_avs_version and working around
   the new colorspaces in 2.6.  So if AvxSynth is detected, explicitly define
   the version as 2.58. */
#if USE_AVXSYNTH
    return 2.58f;
#else
    FAIL_IF_ERROR( !h->func.avs_function_exists( h->env, "VersionNumber" ), "VersionNumber does not exist\n" );
    AVS_Value ver = h->func.avs_invoke( h->env, "VersionNumber", avs_new_value_array( NULL, 0 ), NULL );
    FAIL_IF_ERROR( avs_is_error( ver ), "unable to determine avisynth version: %s\n", avs_as_error( ver ) );
    FAIL_IF_ERROR( !avs_is_float( ver ), "VersionNumber did not return a float value\n" );
    float ret = avs_as_float( ver );
    h->func.avs_release_value( ver );
    return ret;
#endif
}
Example #4
static AVS_Value import_d2v_jackie(avs_hnd_t *ah, LPSTR input)
{
    if (!ah->func.avs_function_exists(ah->env, "MPEG2Dec3_MPEG2Source"))
        return avs_void;
    AVS_Value arg_arr[8] = {
        avs_new_value_string(input),
        avs_new_value_int(ah->d2v.idct),
        avs_new_value_int(ah->d2v.cpu),
        avs_new_value_int(ah->d2v.moderate_h),
        avs_new_value_int(ah->d2v.moderate_v),
        avs_new_value_string(ah->d2v.cpu2),
        avs_new_value_bool(ah->d2v.showq),
        avs_new_value_bool(ah->d2v.fastmc)
    };
    const char *name[8] = {
        NULL, "idct", "cpu", "moderate_h",
        "moderate_v", "cpu2", "showQ", "fastMC"
    };
    return ah->func.avs_invoke(ah->env, "MPEG2Dec3_MPEG2Source", avs_new_value_array(arg_arr, 8), name);
}
Example #5
static AVS_Value import_d2v_donald(avs_hnd_t *ah, LPSTR input)
{
    if (!ah->func.avs_function_exists(ah->env, "DGDecode_MPEG2Source") && load_dgdecode_dll(ah))
        return avs_void;

    AVS_Value arg_arr[10] = {
        avs_new_value_string(input),
        avs_new_value_int(ah->d2v.upconv),
        avs_new_value_int(ah->d2v.idct),
        avs_new_value_int(ah->d2v.cpu),
        avs_new_value_int(ah->d2v.moderate_h),
        avs_new_value_int(ah->d2v.moderate_v),
        avs_new_value_string(ah->d2v.cpu2),
        avs_new_value_int(ah->d2v.info),
        avs_new_value_bool(ah->d2v.showq),
        avs_new_value_bool(ah->d2v.fastmc)
    };
    const char *name[10] = {
        NULL, "upConv", "idct", "cpu", "moderate_h",
        "moderate_v", "cpu2", "info", "showQ", "fastMC"
    };
    return ah->func.avs_invoke(ah->env, "DGDecode_MPEG2Source", avs_new_value_array(arg_arr, 10), name);
}
Example #6
AVS_T *initAVS(const char *filename)
{
    AVS_T *AVS = malloc(sizeof(AVS_T));
    if (!AVS)
        return NULL;

    AVS_Value arg0 = avs_new_value_string(filename);
    AVS_Value args = avs_new_value_array(&arg0, 1);

    memset(AVS, 0, sizeof(AVS_T));

#ifdef WIN32_LOADER
    AVS->ldt_fs = Setup_LDT_Keeper();
#endif

    AVS->dll = LoadLibraryA("avisynth.dll");
    if(!AVS->dll)
    {
        mp_msg(MSGT_DEMUX ,MSGL_V, "AVS: failed to load avisynth.dll\n");
        goto avs_err;
    }

    /* Dynamic import of needed stuff from avisynth.dll */
    IMPORT_FUNC(avs_create_script_environment);
    IMPORT_FUNC(avs_invoke);
    IMPORT_FUNC(avs_get_video_info);
    IMPORT_FUNC(avs_take_clip);
    IMPORT_FUNC(avs_release_clip);
    IMPORT_FUNC(avs_get_frame);
    IMPORT_FUNC(avs_release_video_frame);
    IMPORT_FUNC(avs_get_audio);

    AVS->avs_env = AVS->avs_create_script_environment(AVISYNTH_INTERFACE_VERSION);
    if (!AVS->avs_env)
    {
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: avs_create_script_environment failed\n");
        goto avs_err;
    }


    AVS->handler = AVS->avs_invoke(AVS->avs_env, "Import", args, 0);

    if (avs_is_error(AVS->handler))
    {
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: Avisynth error: %s\n", avs_as_string(AVS->handler));
        goto avs_err;
    }

    if (!avs_is_clip(AVS->handler))
    {
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: Avisynth doesn't return a clip\n");
        goto avs_err;
    }

    return AVS;

avs_err:
    if (AVS->dll) FreeLibrary(AVS->dll);
#ifdef WIN32_LOADER
    Restore_LDT_Keeper(AVS->ldt_fs);
#endif
    free(AVS);
    return NULL;
}
Example #7
static demuxer_t* demux_open_avs(demuxer_t* demuxer)
{
    int found = 0;
    AVS_T *AVS = demuxer->priv;
    int audio_samplesize = 0;
    AVS->frameno = 0;
    AVS->sampleno = 0;

    mp_msg(MSGT_DEMUX, MSGL_V, "AVS: demux_open_avs()\n");
    demuxer->seekable = 1;

    AVS->clip = AVS->avs_take_clip(AVS->handler, AVS->avs_env);
    if(!AVS->clip)
    {
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: avs_take_clip() failed\n");
        return NULL;
    }

    AVS->video_info = AVS->avs_get_video_info(AVS->clip);
    if (!AVS->video_info)
    {
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: avs_get_video_info() call failed\n");
        return NULL;
    }

    if (!avs_is_yv12(AVS->video_info))
    {
        AVS->handler = AVS->avs_invoke(AVS->avs_env, "ConvertToYV12", avs_new_value_array(&AVS->handler, 1), 0);
        if (avs_is_error(AVS->handler))
        {
            mp_msg(MSGT_DEMUX, MSGL_V, "AVS: Cannot convert input video to YV12: %s\n", avs_as_string(AVS->handler));
            return NULL;
        }

        AVS->clip = AVS->avs_take_clip(AVS->handler, AVS->avs_env);

        if(!AVS->clip)
        {
            mp_msg(MSGT_DEMUX, MSGL_V, "AVS: avs_take_clip() failed\n");
            return NULL;
        }

        AVS->video_info = AVS->avs_get_video_info(AVS->clip);
        if (!AVS->video_info)
        {
            mp_msg(MSGT_DEMUX, MSGL_V, "AVS: avs_get_video_info() call failed\n");
            return NULL;
        }
    }

    // TODO check field-based ??

    /* Video */
    if (avs_has_video(AVS->video_info))
    {
        sh_video_t *sh_video = new_sh_video(demuxer, 0);
        found = 1;

        if (demuxer->video->id == -1) demuxer->video->id = 0;
        if (demuxer->video->id == 0)
            demuxer->video->sh = sh_video;
        sh_video->ds = demuxer->video;

        sh_video->disp_w = AVS->video_info->width;
        sh_video->disp_h = AVS->video_info->height;

        //sh_video->format = get_mmioFOURCC(AVS->video_info);
        sh_video->format = mmioFOURCC('Y', 'V', '1', '2');
        sh_video->fps = (double) AVS->video_info->fps_numerator / (double) AVS->video_info->fps_denominator;
        sh_video->frametime = 1.0 / sh_video->fps;

        sh_video->bih = malloc(sizeof(BITMAPINFOHEADER) + (256 * 4));
        sh_video->bih->biCompression = sh_video->format;
        sh_video->bih->biBitCount = avs_bits_per_pixel(AVS->video_info);
        //sh_video->bih->biPlanes = 2;

        sh_video->bih->biWidth = AVS->video_info->width;
        sh_video->bih->biHeight = AVS->video_info->height;
        sh_video->num_frames = 0;
        sh_video->num_frames_decoded = 0;
    }

    /* Audio */
    if (avs_has_audio(AVS->video_info))
      switch (AVS->video_info->sample_type) {
        case AVS_SAMPLE_INT8:  audio_samplesize = 1; break;
        case AVS_SAMPLE_INT16: audio_samplesize = 2; break;
        case AVS_SAMPLE_INT24: audio_samplesize = 3; break;
        case AVS_SAMPLE_INT32:
        case AVS_SAMPLE_FLOAT: audio_samplesize = 4; break;
        default:
          mp_msg(MSGT_DEMUX, MSGL_ERR, "AVS: unknown audio type, disabling\n");
      }
    if (audio_samplesize)
    {
        sh_audio_t *sh_audio = new_sh_audio(demuxer, 0);
        found = 1;
        mp_msg(MSGT_DEMUX, MSGL_V, "AVS: Clip has audio -> Channels = %d - Freq = %d\n", AVS->video_info->nchannels, AVS->video_info->audio_samples_per_second);

        if (demuxer->audio->id == -1) demuxer->audio->id = 0;
        if (demuxer->audio->id == 0)
            demuxer->audio->sh = sh_audio;
        sh_audio->ds = demuxer->audio;

        sh_audio->wf = malloc(sizeof(WAVEFORMATEX));
        sh_audio->wf->wFormatTag = sh_audio->format =
            (AVS->video_info->sample_type == AVS_SAMPLE_FLOAT) ? 0x3 : 0x1;
        sh_audio->wf->nChannels = sh_audio->channels = AVS->video_info->nchannels;
        sh_audio->wf->nSamplesPerSec = sh_audio->samplerate = AVS->video_info->audio_samples_per_second;
        sh_audio->samplesize = audio_samplesize;
        sh_audio->wf->nAvgBytesPerSec = sh_audio->channels * sh_audio->samplesize * sh_audio->samplerate;
        sh_audio->wf->nBlockAlign = sh_audio->channels * sh_audio->samplesize;
        sh_audio->wf->wBitsPerSample = sh_audio->samplesize * 8;
        sh_audio->wf->cbSize = 0;
        sh_audio->i_bps = sh_audio->wf->nAvgBytesPerSec;
    }

    AVS->init = 1;
    if (found)
        return demuxer;
    else
        return NULL;
}
Example #8
static int open_file( char *psz_filename, hnd_t *p_handle, video_info_t *info, cli_input_opt_t *opt )
{
    FILE *fh = x264_fopen( psz_filename, "r" );
    if( !fh )
        return -1;
    int b_regular = x264_is_regular_file( fh );
    fclose( fh );
    FAIL_IF_ERROR( !b_regular, "AVS input is incompatible with non-regular file `%s'\n", psz_filename );

    avs_hnd_t *h = calloc( 1, sizeof(avs_hnd_t) );
    if( !h )
        return -1;
    FAIL_IF_ERROR( custom_avs_load_library( h ), "failed to load avisynth\n" );
    h->env = h->func.avs_create_script_environment( AVS_INTERFACE_25 );
    if( h->func.avs_get_error )
    {
        const char *error = h->func.avs_get_error( h->env );
        FAIL_IF_ERROR( error, "%s\n", error );
    }
    float avs_version = get_avs_version( h );
    if( avs_version <= 0 )
        return -1;
    x264_cli_log( "avs", X264_LOG_DEBUG, "using avisynth version %.2f\n", avs_version );

#ifdef _WIN32
    /* Avisynth doesn't support Unicode filenames. */
    char ansi_filename[MAX_PATH];
    FAIL_IF_ERROR( !x264_ansi_filename( psz_filename, ansi_filename, MAX_PATH, 0 ), "invalid ansi filename\n" );
    AVS_Value arg = avs_new_value_string( ansi_filename );
#else
    AVS_Value arg = avs_new_value_string( psz_filename );
#endif

    AVS_Value res;
    char *filename_ext = get_filename_extension( psz_filename );

    if( !strcasecmp( filename_ext, "avs" ) )
    {
        res = h->func.avs_invoke( h->env, "Import", arg, NULL );
        FAIL_IF_ERROR( avs_is_error( res ), "%s\n", avs_as_error( res ) );
        /* check if the user is using a multi-threaded script and apply distributor if necessary.
           adapted from avisynth's vfw interface */
        AVS_Value mt_test = h->func.avs_invoke( h->env, "GetMTMode", avs_new_value_bool( 0 ), NULL );
        int mt_mode = avs_is_int( mt_test ) ? avs_as_int( mt_test ) : 0;
        h->func.avs_release_value( mt_test );
        if( mt_mode > 0 && mt_mode < 5 )
        {
            AVS_Value temp = h->func.avs_invoke( h->env, "Distributor", res, NULL );
            h->func.avs_release_value( res );
            res = temp;
        }
    }
    else /* non script file */
    {
        /* cycle through known source filters to find one that works */
        const char *filter[AVS_MAX_SEQUENCE+1] = { 0 };
        avs_build_filter_sequence( filename_ext, filter );
        int i;
        for( i = 0; filter[i]; i++ )
        {
            x264_cli_log( "avs", X264_LOG_INFO, "trying %s... ", filter[i] );
            if( !h->func.avs_function_exists( h->env, filter[i] ) )
            {
                x264_cli_printf( X264_LOG_INFO, "not found\n" );
                continue;
            }
            if( !strncasecmp( filter[i], "FFmpegSource", 12 ) )
            {
                x264_cli_printf( X264_LOG_INFO, "indexing... " );
                fflush( stderr );
            }
            res = h->func.avs_invoke( h->env, filter[i], arg, NULL );
            if( !avs_is_error( res ) )
            {
                x264_cli_printf( X264_LOG_INFO, "succeeded\n" );
                break;
            }
            x264_cli_printf( X264_LOG_INFO, "failed\n" );
        }
        FAIL_IF_ERROR( !filter[i], "unable to find source filter to open `%s'\n", psz_filename );
    }
    FAIL_IF_ERROR( !avs_is_clip( res ), "`%s' didn't return a video clip\n", psz_filename );
    h->clip = h->func.avs_take_clip( res, h->env );
    const AVS_VideoInfo *vi = h->func.avs_get_video_info( h->clip );
    FAIL_IF_ERROR( !avs_has_video( vi ), "`%s' has no video data\n", psz_filename );
    /* if the clip is made of fields instead of frames, call weave to make them frames */
    if( avs_is_field_based( vi ) )
    {
        x264_cli_log( "avs", X264_LOG_WARNING, "detected fieldbased (separated) input, weaving to frames\n" );
        AVS_Value tmp = h->func.avs_invoke( h->env, "Weave", res, NULL );
        FAIL_IF_ERROR( avs_is_error( tmp ), "couldn't weave fields into frames: %s\n", avs_as_error( tmp ) );
        res = update_clip( h, &vi, tmp, res );
        info->interlaced = 1;
        info->tff = avs_is_tff( vi );
    }
#if !HAVE_SWSCALE
    /* if swscale is not available, convert the CSP if necessary */
    FAIL_IF_ERROR( avs_version < 2.6f && (opt->output_csp == X264_CSP_I400 || opt->output_csp == X264_CSP_I422 || opt->output_csp == X264_CSP_I444),
                   "avisynth >= 2.6 is required for i400/i422/i444 output\n" );
    if( (opt->output_csp == X264_CSP_I400 && !AVS_IS_Y( vi )) ||
        (opt->output_csp == X264_CSP_I420 && !AVS_IS_420( vi )) ||
        (opt->output_csp == X264_CSP_I422 && !AVS_IS_422( vi )) ||
        (opt->output_csp == X264_CSP_I444 && !AVS_IS_444( vi )) ||
        (opt->output_csp == X264_CSP_RGB && !avs_is_rgb( vi )) )
    {
        const char *csp;
        if( AVS_IS_AVISYNTHPLUS )
        {
            csp = opt->output_csp == X264_CSP_I400 ? "Y" :
                  opt->output_csp == X264_CSP_I420 ? "YUV420" :
                  opt->output_csp == X264_CSP_I422 ? "YUV422" :
                  opt->output_csp == X264_CSP_I444 ? "YUV444" :
                  "RGB";
        }
        else
        {
            csp = opt->output_csp == X264_CSP_I400 ? "Y8" :
                  opt->output_csp == X264_CSP_I420 ? "YV12" :
                  opt->output_csp == X264_CSP_I422 ? "YV16" :
                  opt->output_csp == X264_CSP_I444 ? "YV24" :
                  "RGB";
        }
        x264_cli_log( "avs", X264_LOG_WARNING, "converting input clip to %s\n", csp );
        if( opt->output_csp != X264_CSP_I400 )
        {
            FAIL_IF_ERROR( opt->output_csp < X264_CSP_I444 && (vi->width&1),
                           "input clip width not divisible by 2 (%dx%d)\n", vi->width, vi->height );
            FAIL_IF_ERROR( opt->output_csp == X264_CSP_I420 && info->interlaced && (vi->height&3),
                           "input clip height not divisible by 4 (%dx%d)\n", vi->width, vi->height );
            FAIL_IF_ERROR( (opt->output_csp == X264_CSP_I420 || info->interlaced) && (vi->height&1),
                           "input clip height not divisible by 2 (%dx%d)\n", vi->width, vi->height );
        }
        char conv_func[16];
        snprintf( conv_func, sizeof(conv_func), "ConvertTo%s", csp );
        AVS_Value arg_arr[3];
        const char *arg_name[3];
        int arg_count = 1;
        arg_arr[0] = res;
        arg_name[0] = NULL;
        if( opt->output_csp != X264_CSP_I400 )
        {
            arg_arr[arg_count] = avs_new_value_bool( info->interlaced );
            arg_name[arg_count] = "interlaced";
            arg_count++;
        }
        /* if doing a rgb <-> yuv conversion then range is handled via 'matrix'. though it's only supported in 2.56+ */
        char matrix[7];
        if( avs_version >= 2.56f && ((opt->output_csp == X264_CSP_RGB && avs_is_yuv( vi )) || (opt->output_csp != X264_CSP_RGB && avs_is_rgb( vi ))) )
        {
            // if converting from yuv, then we specify the matrix for the input, otherwise use the output's.
            int use_pc_matrix = avs_is_yuv( vi ) ? opt->input_range == RANGE_PC : opt->output_range == RANGE_PC;
            snprintf( matrix, sizeof(matrix), "%s601", use_pc_matrix ? "PC." : "Rec" ); /* FIXME: use correct coefficients */
            arg_arr[arg_count] = avs_new_value_string( matrix );
            arg_name[arg_count] = "matrix";
            arg_count++;
            // notification that the input range has changed to the desired one
            opt->input_range = opt->output_range;
        }
        AVS_Value res2 = h->func.avs_invoke( h->env, conv_func, avs_new_value_array( arg_arr, arg_count ), arg_name );
        FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert input clip to %s: %s\n", csp, avs_as_error( res2 ) );
        res = update_clip( h, &vi, res2, res );
    }
    /* if swscale is not available, change the range if necessary. This only applies to YUV-based CSPs however */
    if( avs_is_yuv( vi ) && opt->output_range != RANGE_AUTO && ((opt->input_range == RANGE_PC) != opt->output_range) )
    {
        const char *levels = opt->output_range ? "TV->PC" : "PC->TV";
        x264_cli_log( "avs", X264_LOG_WARNING, "performing %s conversion\n", levels );
        AVS_Value arg_arr[2];
        arg_arr[0] = res;
        arg_arr[1] = avs_new_value_string( levels );
        const char *arg_name[] = { NULL, "levels" };
        AVS_Value res2 = h->func.avs_invoke( h->env, "ColorYUV", avs_new_value_array( arg_arr, 2 ), arg_name );
        FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert range: %s\n", avs_as_error( res2 ) );
        res = update_clip( h, &vi, res2, res );
        // notification that the input range has changed to the desired one
        opt->input_range = opt->output_range;
    }
#endif

    h->func.avs_release_value( res );

    info->width   = vi->width;
    info->height  = vi->height;
    info->fps_num = vi->fps_numerator;
    info->fps_den = vi->fps_denominator;
    h->num_frames = info->num_frames = vi->num_frames;
    info->thread_safe = 1;
    if( AVS_IS_RGB64( vi ) )
        info->csp = X264_CSP_BGRA | X264_CSP_VFLIP | X264_CSP_HIGH_DEPTH;
    else if( avs_is_rgb32( vi ) )
        info->csp = X264_CSP_BGRA | X264_CSP_VFLIP;
    else if( AVS_IS_RGB48( vi ) )
        info->csp = X264_CSP_BGR | X264_CSP_VFLIP | X264_CSP_HIGH_DEPTH;
    else if( avs_is_rgb24( vi ) )
        info->csp = X264_CSP_BGR | X264_CSP_VFLIP;
    else if( AVS_IS_YUV444P16( vi ) )
        info->csp = X264_CSP_I444 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv24( vi ) )
        info->csp = X264_CSP_I444;
    else if( AVS_IS_YUV422P16( vi ) )
        info->csp = X264_CSP_I422 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv16( vi ) )
        info->csp = X264_CSP_I422;
    else if( AVS_IS_YUV420P16( vi ) )
        info->csp = X264_CSP_I420 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_yv12( vi ) )
        info->csp = X264_CSP_I420;
    else if( AVS_IS_Y16( vi ) )
        info->csp = X264_CSP_I400 | X264_CSP_HIGH_DEPTH;
    else if( avs_is_y8( vi ) )
        info->csp = X264_CSP_I400;
    else if( avs_is_yuy2( vi ) )
        info->csp = X264_CSP_YUYV;
#if HAVE_SWSCALE
    else if( avs_is_yv411( vi ) )
        info->csp = AV_PIX_FMT_YUV411P | X264_CSP_OTHER;
#endif
    else
    {
        AVS_Value pixel_type = h->func.avs_invoke( h->env, "PixelType", res, NULL );
        const char *pixel_type_name = avs_is_string( pixel_type ) ? avs_as_string( pixel_type ) : "unknown";
        FAIL_IF_ERROR( 1, "not supported pixel type: %s\n", pixel_type_name );
    }
    info->vfr = 0;

    *p_handle = h;
    return 0;
}
Example #9
static AVS_Value initialize_avisynth(avs_hnd_t *ah, LPSTR input)
{
    if (load_avisynth_dll(ah))
        return avs_void;

    ah->env = ah->func.avs_create_script_environment(AVS_INTERFACE_25);
    if (ah->func.avs_get_error && ah->func.avs_get_error(ah->env))
        return avs_void;

    ah->version = get_avisynth_version(ah);

    AVS_Value res = avs_void;
    switch (ah->ext) {
    case TYPE_AVS:
        res = import_avs(ah, input);
        break;
    case TYPE_D2V_DONALD:
        res = import_d2v_donald(ah, input);
        break;
    case TYPE_D2V_JACKIE:
#ifdef D2V_DVD2AVI_ENABLED
        res = import_d2v_jackie(ah, input);
        break;
#endif
    default:
        break;
    }

    if (avs_is_error(res) || !avs_defined(res))
        return res;

    if (ah->ext == TYPE_D2V_DONALD && ah->d2v.keyframe_judge)
        create_index(ah, input);

    if (ah->adjust_audio_length) {
        AVS_Value arg_arr[3] = {res, avs_new_value_int(0), avs_new_value_int(0)};
        AVS_Value tmp = ah->func.avs_invoke(ah->env, "Trim", avs_new_value_array(arg_arr, 3), NULL);
        ah->func.avs_release_value(res);
        res = tmp;
    }

    AVS_Value mt_test = ah->func.avs_invoke(ah->env, "GetMTMode", avs_new_value_bool(0), NULL);
    int mt_mode = avs_is_int(mt_test) ? avs_as_int(mt_test) : 0;
    ah->func.avs_release_value(mt_test);
    if (mt_mode > 0 && mt_mode < 5) {
        AVS_Value temp = ah->func.avs_invoke(ah->env, "Distributor", res, NULL);
        ah->func.avs_release_value(res);
        res = temp;
    }

    ah->clip = ah->func.avs_take_clip(res, ah->env);
    ah->vi = ah->func.avs_get_video_info(ah->clip);

    if (ah->highbit_depth && ah->version >= 260 && avs_is_planar(ah->vi)) {
        if (avs_is_yv411(ah->vi) ||
            ((avs_is_yv24(ah->vi) || avs_is_y8(ah->vi)) && (ah->vi->width & 1)) ||
            ((avs_is_yv16(ah->vi) || avs_is_yv12(ah->vi)) && (ah->vi->width & 3))) {
            ah->func.avs_release_value(res);
            return avs_void;
        }
        if (avs_is_yv12(ah->vi))
            res = invoke_filter(ah, res, "ConvertToYV16");

    } else if (avs_is_yv12(ah->vi) || avs_is_yv16(ah->vi) || avs_is_yv411(ah->vi)) {
        if (ah->func.avs_function_exists(ah->env, ah->yuy2converter))
            res = invoke_filter(ah, res, ah->yuy2converter);
        else
            res = invoke_filter(ah, res, "ConvertToYUY2");
    }

    if (avs_is_rgb32(ah->vi))
        res = invoke_filter(ah, res, "ConvertToRGB24");

    if (ah->vi->sample_type & 0x1C)
        res = invoke_filter(ah, res, "ConvertAudioTo16bit");

    return res;
}
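The invoke_filter() helper is not shown in this excerpt. A plausible sketch of what it needs to do, judging from how the example re-reads ah->vi after each conversion, is given below; the exact signature and the avs_release_clip field are assumptions, not the project's actual code.

/* Assumed helper: apply a parameterless filter to the current clip value,
   release the superseded value and clip, and refresh the cached clip and
   video info so later checks in initialize_avisynth() see the result. */
static AVS_Value invoke_filter(avs_hnd_t *ah, AVS_Value before, const char *filter)
{
    ah->func.avs_release_clip(ah->clip);
    AVS_Value after = ah->func.avs_invoke(ah->env, filter, before, NULL);
    ah->func.avs_release_value(before);
    ah->clip = ah->func.avs_take_clip(after, ah->env);
    ah->vi = ah->func.avs_get_video_info(ah->clip);
    return after;
}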
Example #10
static int open_file( char *psz_filename, hnd_t *p_handle, video_info_t *info, cli_input_opt_t *opt )
{
    FILE *fh = fopen( psz_filename, "r" );
    if( !fh )
        return -1;
    FAIL_IF_ERROR( !x264_is_regular_file( fh ), "AVS input is incompatible with non-regular file `%s'\n", psz_filename );
    fclose( fh );

    avs_hnd_t *h = malloc( sizeof(avs_hnd_t) );
    if( !h )
        return -1;
    FAIL_IF_ERROR( x264_avs_load_library( h ), "failed to load avisynth\n" )
    h->env = h->func.avs_create_script_environment( AVS_INTERFACE_25 );
    FAIL_IF_ERROR( !h->env, "failed to initiate avisynth\n" )
    AVS_Value arg = avs_new_value_string( psz_filename );
    AVS_Value res;
    char *filename_ext = get_filename_extension( psz_filename );

    if( !strcasecmp( filename_ext, "avs" ) )
    {
        res = h->func.avs_invoke( h->env, "Import", arg, NULL );
        FAIL_IF_ERROR( avs_is_error( res ), "%s\n", avs_as_string( res ) )
        /* check if the user is using a multi-threaded script and apply distributor if necessary.
           adapted from avisynth's vfw interface */
        AVS_Value mt_test = h->func.avs_invoke( h->env, "GetMTMode", avs_new_value_bool( 0 ), NULL );
        int mt_mode = avs_is_int( mt_test ) ? avs_as_int( mt_test ) : 0;
        h->func.avs_release_value( mt_test );
        if( mt_mode > 0 && mt_mode < 5 )
        {
            AVS_Value temp = h->func.avs_invoke( h->env, "Distributor", res, NULL );
            h->func.avs_release_value( res );
            res = temp;
        }
    }
    else /* non script file */
    {
        /* cycle through known source filters to find one that works */
        const char *filter[AVS_MAX_SEQUENCE+1] = { 0 };
        avs_build_filter_sequence( filename_ext, filter );
        int i;
        for( i = 0; filter[i]; i++ )
        {
            x264_cli_log( "avs", X264_LOG_INFO, "trying %s... ", filter[i] );
            if( !h->func.avs_function_exists( h->env, filter[i] ) )
            {
                x264_cli_printf( X264_LOG_INFO, "not found\n" );
                continue;
            }
            if( !strncasecmp( filter[i], "FFmpegSource", 12 ) )
            {
                x264_cli_printf( X264_LOG_INFO, "indexing... " );
                fflush( stderr );
            }
            res = h->func.avs_invoke( h->env, filter[i], arg, NULL );
            if( !avs_is_error( res ) )
            {
                x264_cli_printf( X264_LOG_INFO, "succeeded\n" );
                break;
            }
            x264_cli_printf( X264_LOG_INFO, "failed\n" );
        }
        FAIL_IF_ERROR( !filter[i], "unable to find source filter to open `%s'\n", psz_filename )
    }
    FAIL_IF_ERROR( !avs_is_clip( res ), "`%s' didn't return a video clip\n", psz_filename )
    h->clip = h->func.avs_take_clip( res, h->env );
    const AVS_VideoInfo *vi = h->func.avs_get_video_info( h->clip );
    FAIL_IF_ERROR( !avs_has_video( vi ), "`%s' has no video data\n", psz_filename )
    /* if the clip is made of fields instead of frames, call weave to make them frames */
    if( avs_is_field_based( vi ) )
    {
        x264_cli_log( "avs", X264_LOG_WARNING, "detected fieldbased (separated) input, weaving to frames\n" );
        AVS_Value tmp = h->func.avs_invoke( h->env, "Weave", res, NULL );
        FAIL_IF_ERROR( avs_is_error( tmp ), "couldn't weave fields into frames\n" )
        res = update_clip( h, &vi, tmp, res );
        info->interlaced = 1;
        info->tff = avs_is_tff( vi );
    }
#if !HAVE_SWSCALE
    /* if swscale is not available, convert CSPs to yv12 */
    if( !avs_is_yv12( vi ) )
    {
        x264_cli_log( "avs", X264_LOG_WARNING, "converting input clip to YV12\n" );
        FAIL_IF_ERROR( vi->width&1 || vi->height&1, "input clip width or height not divisible by 2 (%dx%d)\n", vi->width, vi->height )
        const char *arg_name[2] = { NULL, "interlaced" };
        AVS_Value arg_arr[2] = { res, avs_new_value_bool( info->interlaced ) };
        AVS_Value res2 = h->func.avs_invoke( h->env, "ConvertToYV12", avs_new_value_array( arg_arr, 2 ), arg_name );
        FAIL_IF_ERROR( avs_is_error( res2 ), "couldn't convert input clip to YV12\n" )
        res = update_clip( h, &vi, res2, res );
    }