Example #1
static void rotate_destroy(struct mp_filter *f)
{
    struct rotate_priv *p = f->priv;

    mp_subfilter_reset(&p->sub);
    TA_FREEP(&p->sub.filter);
}
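/*
 * All of these examples use TA_FREEP() from mpv's talloc wrappers: it frees
 * the allocation a pointer refers to and resets the pointer to NULL, so the
 * owner is left with no dangling pointer to double-free or dereference.
 * A minimal sketch of the idiom (not the actual macro definition):
 *
 *     talloc_free(p->sub.filter);   // freeing NULL is a no-op
 *     p->sub.filter = NULL;         // no stale pointer left behind
 */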
Example #2
static void ao_chain_reset_state(struct ao_chain *ao_c)
{
    ao_c->last_out_pts = MP_NOPTS_VALUE;
    TA_FREEP(&ao_c->output_frame);
    ao_c->out_eof = false;

    mp_audio_buffer_clear(ao_c->ao_buffer);
}
Example #3
void uninit_audio_out(struct MPContext *mpctx)
{
    if (mpctx->ao) {
        // Note: with gapless_audio, stop_play is not correctly set
        if (mpctx->opts->gapless_audio || mpctx->stop_play == AT_END_OF_FILE)
            ao_drain(mpctx->ao);
        ao_uninit(mpctx->ao);

        mp_notify(mpctx, MPV_EVENT_AUDIO_RECONFIG, NULL);
    }
    mpctx->ao = NULL;
    TA_FREEP(&mpctx->ao_filter_fmt);
}
Example #4
static int d3d11va_init_decoder(struct lavc_ctx *s, int w, int h)
{
    HRESULT hr;
    int ret = -1;
    struct priv *p = s->hwdec_priv;
    TA_FREEP(&p->decoder);

    ID3D11Texture2D *texture = NULL;
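    // tmp is a temporary talloc context: everything allocated below is
    // parented to it, so a single talloc_free(tmp) at "done:" cleans up on
    // any error path.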
    void *tmp = talloc_new(NULL);

    UINT n_guids = ID3D11VideoDevice_GetVideoDecoderProfileCount(p->video_dev);
    GUID *device_guids = talloc_array(tmp, GUID, n_guids);
    for (UINT i = 0; i < n_guids; i++) {
        GUID *guid = &device_guids[i];
        hr = ID3D11VideoDevice_GetVideoDecoderProfile(p->video_dev, i, guid);
        if (FAILED(hr)) {
            MP_ERR(p, "Failed to get VideoDecoderProfile %d: %s\n",
                   i, mp_HRESULT_to_str(hr));
            goto done;
        }
        dump_decoder_info(s, guid);
    }

    struct d3d_decoder_fmt fmt =
        d3d_select_decoder_mode(s, device_guids, n_guids,
                                d3d11_formats, MP_ARRAY_SIZE(d3d11_formats),
                                d3d11_format_supported);
    if (!fmt.format) {
        MP_ERR(p, "Failed to find a suitable decoder\n");
        goto done;
    }

    struct d3d11va_decoder *decoder = talloc_zero(tmp, struct d3d11va_decoder);
    talloc_set_destructor(decoder, d3d11va_destroy_decoder);
    decoder->mpfmt_decoded = fmt.format->mpfmt;

    int n_surfaces = hwdec_get_max_refs(s) + ADDITIONAL_SURFACES;
    int w_align = w, h_align = h;
    d3d_surface_align(s, &w_align, &h_align);

    D3D11_TEXTURE2D_DESC tex_desc = {
        .Width            = w_align,
        .Height           = h_align,
        .MipLevels        = 1,
        .Format           = fmt.format->dxfmt,
        .SampleDesc.Count = 1,
        .MiscFlags        = 0,
        .ArraySize        = n_surfaces,
        .Usage            = D3D11_USAGE_DEFAULT,
        .BindFlags        = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE,
        .CPUAccessFlags   = 0,
    };
    hr = ID3D11Device_CreateTexture2D(p->device, &tex_desc, NULL, &texture);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create Direct3D11 texture with %d surfaces: %s\n",
               n_surfaces, mp_HRESULT_to_str(hr));
        goto done;
    }

    if (s->hwdec->type == HWDEC_D3D11VA_COPY) {
        // create a CPU-readable staging texture with mostly the same
        // parameters as the decoder-bound texture above
        ID3D11Texture2D_GetDesc(texture, &tex_desc);
        tex_desc.MipLevels      = 1;
        tex_desc.MiscFlags      = 0;
        tex_desc.ArraySize      = 1;
        tex_desc.Usage          = D3D11_USAGE_STAGING;
        tex_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
        tex_desc.BindFlags      = 0;
        hr = ID3D11Device_CreateTexture2D(p->device, &tex_desc, NULL,
                                          &decoder->staging);
        if (FAILED(hr)) {
            MP_ERR(p, "Failed to create staging texture: %s\n",
                   mp_HRESULT_to_str(hr));
            goto done;
        }
    }

    // pool to hold the mp_image wrapped surfaces
    decoder->pool = talloc_steal(decoder, mp_image_pool_new(n_surfaces));
    // array of the same surfaces (needed by ffmpeg)
    ID3D11VideoDecoderOutputView **surfaces =
        talloc_array_ptrtype(decoder->pool, surfaces, n_surfaces);

    D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc = {
        .DecodeProfile = *fmt.guid,
        .ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D,
    };
    for (int i = 0; i < n_surfaces; i++) {
        ID3D11VideoDecoderOutputView **surface = &surfaces[i];
        view_desc.Texture2D.ArraySlice = i;
        hr = ID3D11VideoDevice_CreateVideoDecoderOutputView(
            p->video_dev, (ID3D11Resource *)texture, &view_desc, surface);
        if (FAILED(hr)) {
            MP_ERR(p, "Failed getting decoder output view %d: %s\n",
                   i, mp_HRESULT_to_str(hr));
            goto done;
        }
        struct mp_image *img = d3d11va_new_ref(*surface, w, h);
        ID3D11VideoDecoderOutputView_Release(*surface); // transferred to img
        if (!img) {
            MP_ERR(p, "Failed to create D3D11VA image %d\n", i);
            goto done;
        }
        mp_image_pool_add(decoder->pool, img); // transferred to pool
    }

    D3D11_VIDEO_DECODER_DESC decoder_desc = {
        .Guid         = *fmt.guid,
        .SampleWidth  = w,
        .SampleHeight = h,
        .OutputFormat = fmt.format->dxfmt,
    };
    UINT n_cfg;
    hr = ID3D11VideoDevice_GetVideoDecoderConfigCount(p->video_dev,
                                                      &decoder_desc, &n_cfg);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to get number of decoder configurations: %s)",
               mp_HRESULT_to_str(hr));
        goto done;
    }

    // pick the config with the highest score
    D3D11_VIDEO_DECODER_CONFIG *decoder_config =
        talloc_zero(decoder, D3D11_VIDEO_DECODER_CONFIG);
    unsigned max_score = 0;
    for (UINT i = 0; i < n_cfg; i++) {
        D3D11_VIDEO_DECODER_CONFIG cfg;
        hr = ID3D11VideoDevice_GetVideoDecoderConfig(p->video_dev,
                                                     &decoder_desc,
                                                     i, &cfg);
        if (FAILED(hr)) {
            MP_ERR(p, "Failed to get decoder config %d: %s\n",
                   i, mp_HRESULT_to_str(hr));
            goto done;
        }
        unsigned score = d3d_decoder_config_score(
            s, &cfg.guidConfigBitstreamEncryption, cfg.ConfigBitstreamRaw);
        if (score > max_score) {
            max_score       = score;
            *decoder_config = cfg;
        }
    }
    if (!max_score) {
        MP_ERR(p, "Failed to find a suitable decoder configuration\n");
        goto done;
    }

    hr = ID3D11VideoDevice_CreateVideoDecoder(p->video_dev, &decoder_desc,
                                              decoder_config,
                                              &decoder->decoder);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create video decoder: %s\n",
               mp_HRESULT_to_str(hr));
        goto done;
    }

    struct AVD3D11VAContext *avd3d11va_ctx = s->avctx->hwaccel_context;
    avd3d11va_ctx->decoder       = decoder->decoder;
    avd3d11va_ctx->video_context = p->video_ctx;
    avd3d11va_ctx->cfg           = decoder_config;
    avd3d11va_ctx->surface_count = n_surfaces;
    avd3d11va_ctx->surface       = surfaces;
    avd3d11va_ctx->workaround    = is_clearvideo(fmt.guid) ?
                                   FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO : 0;

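    // Success: detach the decoder from the temporary context so it survives
    // the talloc_free(tmp) below; it is freed again via TA_FREEP(&p->decoder)
    // on reinit or in d3d11va_uninit().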
    p->decoder = talloc_steal(NULL, decoder);
    ret = 0;
done:
    // the texture is still referenced by the pool images / output views, so
    // releasing our local reference here does not destroy it
    if (texture)
        ID3D11Texture2D_Release(texture);

    talloc_free(tmp);
    return ret;
}

static void destroy_device(struct lavc_ctx *s)
{
    struct priv *p = s->hwdec_priv;

    if (p->device)
        ID3D11Device_Release(p->device);

    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);
}

static bool create_device(struct lavc_ctx *s, BOOL thread_safe)
{
    HRESULT hr;
    struct priv *p = s->hwdec_priv;

    d3d_load_dlls();
    if (!d3d11_dll) {
        MP_ERR(p, "Failed to load D3D11 library\n");
        return false;
    }

    PFN_D3D11_CREATE_DEVICE CreateDevice =
        (void *)GetProcAddress(d3d11_dll, "D3D11CreateDevice");
    if (!CreateDevice) {
        MP_ERR(p, "Failed to get D3D11CreateDevice symbol from DLL: %s\n",
               mp_LastError_to_str());
        return false;
    }

    hr = CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
                      D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0,
                      D3D11_SDK_VERSION, &p->device, NULL, &p->device_ctx);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create D3D11 Device: %s\n",
               mp_HRESULT_to_str(hr));
        return false;
    }

    ID3D10Multithread *multithread;
    hr = ID3D11Device_QueryInterface(p->device, &IID_ID3D10Multithread,
                                     (void **)&multithread);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to get Multithread interface: %s\n",
               mp_HRESULT_to_str(hr));
        return false;
    }
    ID3D10Multithread_SetMultithreadProtected(multithread, thread_safe);
    ID3D10Multithread_Release(multithread);
    return true;
}

static void d3d11va_uninit(struct lavc_ctx *s)
{
    struct priv *p = s->hwdec_priv;
    if (!p)
        return;

    talloc_free(p->decoder);
    av_freep(&s->avctx->hwaccel_context);

    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);

    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);

    destroy_device(s);

    TA_FREEP(&s->hwdec_priv);
}

static int d3d11va_init(struct lavc_ctx *s)
{
    HRESULT hr;
    struct priv *p = talloc_zero(NULL, struct priv);
    if (!p)
        return -1;

    s->hwdec_priv = p;
    p->log = mp_log_new(s, s->log, "d3d11va");
    if (s->hwdec->type == HWDEC_D3D11VA_COPY) {
        mp_check_gpu_memcpy(p->log, NULL);
        p->sw_pool = talloc_steal(p, mp_image_pool_new(17));
    }

    p->device = hwdec_devices_load(s->hwdec_devs, s->hwdec->type);
    if (p->device) {
        ID3D11Device_AddRef(p->device);
        ID3D11Device_GetImmediateContext(p->device, &p->device_ctx);
        if (!p->device_ctx)
            goto fail;
        MP_VERBOSE(p, "Using VO-supplied device %p.\n", p->device);
    } else if (s->hwdec->type == HWDEC_D3D11VA) {
        MP_ERR(p, "No Direct3D device provided for native d3d11 decoding\n");
        goto fail;
    } else {
        if (!create_device(s, FALSE))
            goto fail;
    }

    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx,
                                            &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to get VideoContext interface: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    hr = ID3D11Device_QueryInterface(p->device,
                                     &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to get VideoDevice interface. %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    s->avctx->hwaccel_context = av_d3d11va_alloc_context();
    if (!s->avctx->hwaccel_context) {
        MP_ERR(p, "Failed to allocate hwaccel_context\n");
        goto fail;
    }

    return 0;
fail:
    d3d11va_uninit(s);
    return -1;
}

static int d3d11va_probe(struct lavc_ctx *ctx, struct vd_lavc_hwdec *hwdec,
                         const char *codec)
{
    // d3d11va-copy can do without an external context; native d3d11va requires it.
    if (hwdec->type != HWDEC_D3D11VA_COPY) {
        if (!hwdec_devices_load(ctx->hwdec_devs, HWDEC_D3D11VA))
            return HWDEC_ERR_NO_CTX;
    }
    return d3d_probe_codec(codec);
}

const struct vd_lavc_hwdec mp_vd_lavc_d3d11va = {
    .type           = HWDEC_D3D11VA,
    .image_format   = IMGFMT_D3D11VA,
    .probe          = d3d11va_probe,
    .init           = d3d11va_init,
    .uninit         = d3d11va_uninit,
    .init_decoder   = d3d11va_init_decoder,
    .allocate_image = d3d11va_allocate_image,
    .process_image  = d3d11va_update_image_attribs,
};

const struct vd_lavc_hwdec mp_vd_lavc_d3d11va_copy = {
    .type           = HWDEC_D3D11VA_COPY,
    .copying        = true,
    .image_format   = IMGFMT_D3D11VA,
    .probe          = d3d11va_probe,
    .init           = d3d11va_init,
    .uninit         = d3d11va_uninit,
    .init_decoder   = d3d11va_init_decoder,
    .allocate_image = d3d11va_allocate_image,
    .process_image  = d3d11va_retrieve_image,
    .delay_queue    = HWDEC_DELAY_QUEUE_COUNT,
};
Example #5
static int dxva2_init_decoder(struct lavc_ctx *s, int w, int h)
{
    HRESULT hr;
    int ret = -1;
    struct priv *p = s->hwdec_priv;
    TA_FREEP(&p->decoder_pool);

    int  n_surfaces = hwdec_get_max_refs(s) + ADDITIONAL_SURFACES;
    IDirect3DSurface9    **surfaces = NULL;
    IDirectXVideoDecoder  *decoder  = NULL;
    void                  *tmp      = talloc_new(NULL);

    UINT n_guids;
    GUID *device_guids;
    hr = IDirectXVideoDecoderService_GetDecoderDeviceGuids(
        p->decoder_service, &n_guids, &device_guids);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to retrieve decoder device GUIDs: %s\n",
               mp_HRESULT_to_str(hr));
        goto done;
    }

    dump_decoder_info(s, device_guids, n_guids);

    struct d3d_decoder_fmt fmt =
        d3d_select_decoder_mode(s, device_guids, n_guids,
                                d3d9_formats, MP_ARRAY_SIZE(d3d9_formats),
                                dxva2_format_supported);
    CoTaskMemFree(device_guids);
    if (!fmt.format) {
        MP_ERR(p, "Failed to find a suitable decoder\n");
        goto done;
    }

    p->mpfmt_decoded = fmt.format->mpfmt;
    struct mp_image_pool *decoder_pool =
        talloc_steal(tmp, mp_image_pool_new(n_surfaces));
    DXVA2_ConfigPictureDecode *decoder_config =
        talloc_zero(decoder_pool, DXVA2_ConfigPictureDecode);

    int w_align = w, h_align = h;
    d3d_surface_align(s, &w_align, &h_align);
    DXVA2_VideoDesc video_desc = {
        .SampleWidth  = w,
        .SampleHeight = h,
        .Format       = fmt.format->dxfmt,
    };
    UINT                        n_configs = 0;
    DXVA2_ConfigPictureDecode *configs   = NULL;
    hr = IDirectXVideoDecoderService_GetDecoderConfigurations(
        p->decoder_service, fmt.guid, &video_desc, NULL,
        &n_configs, &configs);
    if (FAILED(hr)) {
        MP_ERR(p, "Unable to retrieve decoder configurations: %s\n",
               mp_HRESULT_to_str(hr));
        goto done;
    }

    unsigned max_score = 0;
    for (UINT i = 0; i < n_configs; i++) {
        unsigned score = d3d_decoder_config_score(
            s, &configs[i].guidConfigBitstreamEncryption,
            configs[i].ConfigBitstreamRaw);
        if (score > max_score) {
            max_score       = score;
            *decoder_config = configs[i];
        }
    }
    CoTaskMemFree(configs);
    if (!max_score) {
        MP_ERR(p, "Failed to find a suitable decoder configuration\n");
        goto done;
    }

    surfaces = talloc_zero_array(decoder_pool, IDirect3DSurface9*, n_surfaces);
    hr = IDirectXVideoDecoderService_CreateSurface(
        p->decoder_service,
        w_align, h_align,
        n_surfaces - 1, fmt.format->dxfmt, D3DPOOL_DEFAULT, 0,
        DXVA2_VideoDecoderRenderTarget, surfaces, NULL);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create %d video surfaces: %s\n",
               n_surfaces, mp_HRESULT_to_str(hr));
        goto done;
    }

    hr = IDirectXVideoDecoderService_CreateVideoDecoder(
        p->decoder_service, fmt.guid, &video_desc, decoder_config,
        surfaces, n_surfaces, &decoder);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create DXVA2 video decoder: %s\n",
               mp_HRESULT_to_str(hr));
        goto done;
    }

    for (int i = 0; i < n_surfaces; i++) {
        struct mp_image *img = dxva2_new_ref(decoder, surfaces[i], w, h);
        if (!img) {
            MP_ERR(p, "Failed to create DXVA2 image\n");
            goto done;
        }
        mp_image_pool_add(decoder_pool, img); // transferred to pool
    }

    // Pass required information on to ffmpeg.
    struct dxva_context *dxva_ctx = s->avctx->hwaccel_context;
    dxva_ctx->cfg           = decoder_config;
    dxva_ctx->decoder       = decoder;
    dxva_ctx->surface_count = n_surfaces;
    dxva_ctx->surface       = surfaces;
    dxva_ctx->workaround    = is_clearvideo(fmt.guid) ?
                              FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO : 0;

    p->decoder_pool = talloc_steal(NULL, decoder_pool);
    ret = 0;
done:
    // On success, `p->decoder_pool` mp_images still hold refs to `surfaces` and
    // `decoder`, so the pointers in the ffmpeg `dxva_context` structure remain
    // valid for the lifetime of the pool.
    if (surfaces) {
        for (int i = 0; i < n_surfaces; i++) {
            if (surfaces[i])
                IDirect3DSurface9_Release(surfaces[i]);
        }
    }
    if (decoder)
        IDirectXVideoDecoder_Release(decoder);

    talloc_free(tmp);
    return ret;
}

static void destroy_device(struct lavc_ctx *s)
{
    struct priv *p = s->hwdec_priv;

    if (p->device)
        IDirect3DDevice9_Release(p->device);

    if (p->d3d9)
        IDirect3D9_Release(p->d3d9);
}

static bool create_device(struct lavc_ctx *s)
{
    struct priv *p = s->hwdec_priv;

    d3d_load_dlls();
    if (!d3d9_dll) {
        MP_ERR(p, "Failed to load D3D9 library\n");
        return false;
    }

    HRESULT (WINAPI *Direct3DCreate9Ex)(UINT, IDirect3D9Ex **) =
        (void *)GetProcAddress(d3d9_dll, "Direct3DCreate9Ex");
    if (!Direct3DCreate9Ex) {
        MP_ERR(p, "Failed to locate Direct3DCreate9Ex\n");
        return false;
    }

    IDirect3D9Ex *d3d9ex = NULL;
    HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &d3d9ex);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create IDirect3D9Ex object\n");
        return false;
    }

    UINT adapter = D3DADAPTER_DEFAULT;
    D3DDISPLAYMODEEX modeex = {.Size = sizeof(modeex)};
    IDirect3D9Ex_GetAdapterDisplayModeEx(d3d9ex, adapter, &modeex, NULL);

    D3DPRESENT_PARAMETERS present_params = {
        .Windowed         = TRUE,
        .BackBufferWidth  = 640,
        .BackBufferHeight = 480,
        .BackBufferCount  = 0,
        .BackBufferFormat = modeex.Format,
        .SwapEffect       = D3DSWAPEFFECT_DISCARD,
        .Flags            = D3DPRESENTFLAG_VIDEO,
    };

    IDirect3DDevice9Ex *exdev = NULL;
    hr = IDirect3D9Ex_CreateDeviceEx(d3d9ex, adapter,
                                     D3DDEVTYPE_HAL,
                                     GetShellWindow(),
                                     D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                                     D3DCREATE_MULTITHREADED |
                                     D3DCREATE_FPU_PRESERVE,
                                     &present_params,
                                     NULL,
                                     &exdev);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create Direct3D device: %s\n",
               mp_HRESULT_to_str(hr));
        IDirect3D9_Release(d3d9ex);
        return false;
    }

    p->d3d9 = (IDirect3D9 *)d3d9ex;
    p->device = (IDirect3DDevice9 *)exdev;
    return true;
}

static void dxva2_uninit(struct lavc_ctx *s)
{
    struct priv *p = s->hwdec_priv;
    if (!p)
        return;

    av_freep(&s->avctx->hwaccel_context);
    talloc_free(p->decoder_pool);

    if (p->decoder_service)
        IDirectXVideoDecoderService_Release(p->decoder_service);

    if (p->device_manager && p->device_handle != INVALID_HANDLE_VALUE)
        IDirect3DDeviceManager9_CloseDeviceHandle(p->device_manager,
                                                  p->device_handle);

    if (p->device_manager)
        IDirect3DDeviceManager9_Release(p->device_manager);

    destroy_device(s);

    TA_FREEP(&s->hwdec_priv);
}

static int dxva2_init(struct lavc_ctx *s)
{
    HRESULT hr;
    struct priv *p = talloc_zero(NULL, struct priv);
    if (!p)
        return -1;

    s->hwdec_priv = p;
    p->device_handle = INVALID_HANDLE_VALUE;
    p->log           = mp_log_new(s, s->log, "dxva2");

    if (s->hwdec->type == HWDEC_DXVA2_COPY) {
        mp_check_gpu_memcpy(p->log, NULL);
        p->sw_pool = talloc_steal(p, mp_image_pool_new(17));
    }

    p->device = hwdec_devices_load(s->hwdec_devs, s->hwdec->type);
    if (p->device) {
        IDirect3D9_AddRef(p->device);
        MP_VERBOSE(p, "Using VO-supplied device %p.\n", p->device);
    } else if (s->hwdec->type == HWDEC_DXVA2) {
        MP_ERR(p, "No Direct3D device provided for native dxva2 decoding\n");
        goto fail;
    } else {
        if (!create_device(s))
            goto fail;
    }

    d3d_load_dlls();
    if (!dxva2_dll) {
        MP_ERR(p, "Failed to load DXVA2 library\n");
        goto fail;
    }

    HRESULT (WINAPI *CreateDeviceManager9)(UINT *, IDirect3DDeviceManager9 **) =
        (void *)GetProcAddress(dxva2_dll, "DXVA2CreateDirect3DDeviceManager9");
    if (!CreateDeviceManager9) {
        MP_ERR(p, "Failed to locate DXVA2CreateDirect3DDeviceManager9\n");
        goto fail;
    }

    unsigned reset_token = 0;
    hr = CreateDeviceManager9(&reset_token, &p->device_manager);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create Direct3D device manager: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    hr = IDirect3DDeviceManager9_ResetDevice(p->device_manager,
                                             p->device, reset_token);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to bind Direct3D device to device manager: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    hr = IDirect3DDeviceManager9_OpenDeviceHandle(p->device_manager,
                                                  &p->device_handle);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to open device handle: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    hr = IDirect3DDeviceManager9_GetVideoService(
        p->device_manager, p->device_handle, &IID_IDirectXVideoDecoderService,
        (void **)&p->decoder_service);
    if (FAILED(hr)) {
        MP_ERR(p, "Failed to create IDirectXVideoDecoderService: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    s->avctx->hwaccel_context = av_mallocz(sizeof(struct dxva_context));
    if (!s->avctx->hwaccel_context)
        goto fail;

    return 0;
fail:
    dxva2_uninit(s);
    return -1;
}

static int dxva2_probe(struct lavc_ctx *ctx, struct vd_lavc_hwdec *hwdec,
                       const char *codec)
{
    // dxva2-copy can do without an external context; native dxva2 requires it.
    if (hwdec->type == HWDEC_DXVA2) {
        if (!hwdec_devices_load(ctx->hwdec_devs, HWDEC_DXVA2))
            return HWDEC_ERR_NO_CTX;
    } else {
        hwdec_devices_load(ctx->hwdec_devs, HWDEC_DXVA2_COPY);
    }
    return d3d_probe_codec(codec);
}

const struct vd_lavc_hwdec mp_vd_lavc_dxva2 = {
    .type           = HWDEC_DXVA2,
    .image_format   = IMGFMT_DXVA2,
    .probe          = dxva2_probe,
    .init           = dxva2_init,
    .uninit         = dxva2_uninit,
    .init_decoder   = dxva2_init_decoder,
    .allocate_image = dxva2_allocate_image,
};

const struct vd_lavc_hwdec mp_vd_lavc_dxva2_copy = {
    .type           = HWDEC_DXVA2_COPY,
    .copying        = true,
    .image_format   = IMGFMT_DXVA2,
    .probe          = dxva2_probe,
    .init           = dxva2_init,
    .uninit         = dxva2_uninit,
    .init_decoder   = dxva2_init_decoder,
    .allocate_image = dxva2_allocate_image,
    .process_image  = dxva2_retrieve_image,
    .delay_queue    = HWDEC_DELAY_QUEUE_COUNT,
};
Example #6
static int recreate_video_proc(struct mp_filter *vf)
{
    struct priv *p = vf->priv;
    HRESULT hr;

    destroy_video_proc(vf);

    D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
        .InputFrameFormat = p->d3d_frame_format,
        .InputWidth = p->c_w,
        .InputHeight = p->c_h,
        .OutputWidth = p->params.w,
        .OutputHeight = p->params.h,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(p->video_dev, &vpdesc,
                                                          &p->vp_enum);
    if (FAILED(hr))
        goto fail;

    D3D11_VIDEO_PROCESSOR_CAPS caps;
    hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorCaps(p->vp_enum, &caps);
    if (FAILED(hr))
        goto fail;

    MP_VERBOSE(vf, "Found %d rate conversion caps. Looking for caps=0x%x.\n",
               (int)caps.RateConversionCapsCount, p->opts->mode);

    int rindex = -1;
    for (int n = 0; n < caps.RateConversionCapsCount; n++) {
        D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rcaps;
        hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorRateConversionCaps
                (p->vp_enum, n, &rcaps);
        if (FAILED(hr))
            goto fail;
        MP_VERBOSE(vf, "  - %d: 0x%08x\n", n, (unsigned)rcaps.ProcessorCaps);
        if (rcaps.ProcessorCaps & p->opts->mode) {
            MP_VERBOSE(vf, "       (matching)\n");
            if (rindex < 0)
                rindex = n;
        }
    }

    if (rindex < 0) {
        MP_WARN(vf, "No fitting video processor found, picking #0.\n");
        rindex = 0;
    }

    // TODO: so, how do we select which rate conversion mode the processor uses?

    hr = ID3D11VideoDevice_CreateVideoProcessor(p->video_dev, p->vp_enum, rindex,
                                                &p->video_proc);
    if (FAILED(hr)) {
        MP_ERR(vf, "Failed to create D3D11 video processor.\n");
        goto fail;
    }

    // Note: libavcodec does not support cropping left/top with hwaccel.
    RECT src_rc = {
        .right = p->params.w,
        .bottom = p->params.h,
    };
    ID3D11VideoContext_VideoProcessorSetStreamSourceRect(p->video_ctx,
                                                         p->video_proc,
                                                         0, TRUE, &src_rc);

    // This is supposed to stop drivers from messing up the video quality.
    ID3D11VideoContext_VideoProcessorSetStreamAutoProcessingMode(p->video_ctx,
                                                                 p->video_proc,
                                                                 0, FALSE);

    ID3D11VideoContext_VideoProcessorSetStreamOutputRate(p->video_ctx,
                                                         p->video_proc,
                                                         0,
                                                         D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL,
                                                         FALSE, 0);

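    // YCbCr_Matrix: 0 selects BT.601, 1 selects BT.709; Nominal_Range: 1 is
    // limited/TV range (16-235), 2 is full/PC range (0-255).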
    D3D11_VIDEO_PROCESSOR_COLOR_SPACE csp = {
        .YCbCr_Matrix = p->params.color.space != MP_CSP_BT_601,
        .Nominal_Range = p->params.color.levels == MP_CSP_LEVELS_TV ? 1 : 2,
    };
    ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
                                                         p->video_proc,
                                                         0, &csp);
    if (p->out_rgb) {
        if (p->params.color.space != MP_CSP_BT_601 &&
            p->params.color.space != MP_CSP_BT_709)
        {
            MP_WARN(vf, "Unsupported video colorspace (%s/%s). Consider "
                    "disabling hardware decoding, or using "
                    "--hwdec=d3d11va-copy to get correct output.\n",
                    m_opt_choice_str(mp_csp_names, p->params.color.space),
                    m_opt_choice_str(mp_csp_levels_names, p->params.color.levels));
        }
    } else {
        ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
                                                             p->video_proc,
                                                             &csp);
    }

    return 0;
fail:
    destroy_video_proc(vf);
    return -1;
}

static struct mp_image *render(struct mp_filter *vf)
{
    struct priv *p = vf->priv;
    int res = -1;
    HRESULT hr;
    ID3D11VideoProcessorInputView *in_view = NULL;
    ID3D11VideoProcessorOutputView *out_view = NULL;
    struct mp_image *in = NULL, *out = NULL;
    out = mp_image_pool_get(p->pool, IMGFMT_D3D11, p->params.w, p->params.h);
    if (!out) {
        MP_WARN(vf, "failed to allocate frame\n");
        goto cleanup;
    }

    ID3D11Texture2D *d3d_out_tex = (void *)out->planes[0];

    in = mp_refqueue_get(p->queue, 0);
    if (!in)
        goto cleanup;
    ID3D11Texture2D *d3d_tex = (void *)in->planes[0];
    int d3d_subindex = (intptr_t)in->planes[1];

    mp_image_copy_attributes(out, in);

    D3D11_VIDEO_FRAME_FORMAT d3d_frame_format;
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_top_field_first(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);
    if (!p->video_proc || p->c_w != texdesc.Width || p->c_h != texdesc.Height ||
        p->d3d_frame_format != d3d_frame_format)
    {
        p->c_w = texdesc.Width;
        p->c_h = texdesc.Height;
        p->d3d_frame_format = d3d_frame_format;
        if (recreate_video_proc(vf) < 0)
            goto cleanup;
    }

    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_is_top_field(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    ID3D11VideoContext_VideoProcessorSetStreamFrameFormat(p->video_ctx,
                                                          p->video_proc,
                                                          0, d3d_frame_format);

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC indesc = {
        .ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
        .Texture2D = {
            .ArraySlice = d3d_subindex,
        },
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorInputView(p->video_dev,
                                                         (ID3D11Resource *)d3d_tex,
                                                         p->vp_enum, &indesc,
                                                         &in_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorInputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outdesc = {
        .ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(p->video_dev,
                                                          (ID3D11Resource *)d3d_out_tex,
                                                          p->vp_enum, &outdesc,
                                                          &out_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorOutputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_STREAM stream = {
        .Enable = TRUE,
        .pInputSurface = in_view,
    };
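    // OutputFrame index for VideoProcessorBlt: 0 for progressive frames or the
    // first field, 1 when rendering the second field of an interlaced frame.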
    int frame = mp_refqueue_is_second_field(p->queue);
    hr = ID3D11VideoContext_VideoProcessorBlt(p->video_ctx, p->video_proc,
                                              out_view, frame, 1, &stream);
    if (FAILED(hr)) {
        MP_ERR(vf, "VideoProcessorBlt failed.\n");
        goto cleanup;
    }

    res = 0;
cleanup:
    if (in_view)
        ID3D11VideoProcessorInputView_Release(in_view);
    if (out_view)
        ID3D11VideoProcessorOutputView_Release(out_view);
    if (res < 0)
        TA_FREEP(&out);
    return out;
}

static bool vo_supports(struct priv *p, int subfmt)
{
    for (int n = 0; p->vo_formats && p->vo_formats[n]; n++) {
        if (p->vo_formats[n] == subfmt)
            return true;
    }
    return false;
}

static void vf_d3d11vpp_process(struct mp_filter *vf)
{
    struct priv *p = vf->priv;

    struct mp_image *in_fmt = mp_refqueue_execute_reinit(p->queue);
    if (in_fmt) {
        mp_image_pool_clear(p->pool);

        destroy_video_proc(vf);

        p->params = in_fmt->params;
        p->out_params = p->params;

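        // Prefer passing NV12 D3D11 surfaces straight through; if the VO
        // cannot take them, fall back to converting to an RGB surface.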
        if (vo_supports(p, IMGFMT_NV12)) {
            p->out_params.hw_subfmt = IMGFMT_NV12;
            p->out_format = DXGI_FORMAT_NV12;
            p->out_shared = false;
            p->out_rgb = false;
        } else {
            p->out_params.hw_subfmt = IMGFMT_RGB0;
            p->out_format = DXGI_FORMAT_B8G8R8A8_UNORM;
            p->out_shared = true;
            p->out_rgb = true;
        }
        p->out_params.hw_flags = 0;

        p->require_filtering = p->params.hw_subfmt != p->out_params.hw_subfmt;
    }

    if (!mp_refqueue_can_output(p->queue))
        return;

    if (!mp_refqueue_should_deint(p->queue) && !p->require_filtering) {
        // no filtering
        struct mp_image *in = mp_image_new_ref(mp_refqueue_get(p->queue, 0));
        if (!in) {
            mp_filter_internal_mark_failed(vf);
            return;
        }
        mp_refqueue_write_out_pin(p->queue, in);
    } else {
        mp_refqueue_write_out_pin(p->queue, render(vf));
    }
}

static void uninit(struct mp_filter *vf)
{
    struct priv *p = vf->priv;

    destroy_video_proc(vf);

    flush_frames(vf);
    talloc_free(p->queue);
    talloc_free(p->pool);

    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);

    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);

    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);

    if (p->vo_dev)
        ID3D11Device_Release(p->vo_dev);
}

static const struct mp_filter_info vf_d3d11vpp_filter = {
    .name = "d3d11vpp",
    .process = vf_d3d11vpp_process,
    .reset = flush_frames,
    .destroy = uninit,
    .priv_size = sizeof(struct priv),
};

static struct mp_filter *vf_d3d11vpp_create(struct mp_filter *parent,
                                            void *options)
{
    struct mp_filter *f = mp_filter_create(parent, &vf_d3d11vpp_filter);
    if (!f) {
        talloc_free(options);
        return NULL;
    }

    mp_filter_add_pin(f, MP_PIN_IN, "in");
    mp_filter_add_pin(f, MP_PIN_OUT, "out");

    struct priv *p = f->priv;
    p->opts = talloc_steal(p, options);

    // Special path for vf_d3d11_create_outconv(): disable all processing except
    // possibly surface format conversions.
    if (!p->opts) {
        static const struct opts opts = {0};
        p->opts = (struct opts *)&opts;
    }

    p->queue = mp_refqueue_alloc(f);

    struct mp_stream_info *info = mp_filter_find_stream_info(f);
    if (!info || !info->hwdec_devs)
        goto fail;

    hwdec_devices_request_all(info->hwdec_devs);

    struct mp_hwdec_ctx *hwctx =
        hwdec_devices_get_by_lavc(info->hwdec_devs, AV_HWDEVICE_TYPE_D3D11VA);
    if (!hwctx || !hwctx->av_device_ref)
        goto fail;
    AVHWDeviceContext *avhwctx = (void *)hwctx->av_device_ref->data;
    AVD3D11VADeviceContext *d3dctx = avhwctx->hwctx;

    p->vo_dev = d3dctx->device;
    ID3D11Device_AddRef(p->vo_dev);

    p->vo_formats = hwctx->supported_formats;

    HRESULT hr;

    hr = ID3D11Device_QueryInterface(p->vo_dev, &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr))
        goto fail;

    ID3D11Device_GetImmediateContext(p->vo_dev, &p->device_ctx);
    if (!p->device_ctx)
        goto fail;
    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx, &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr))
        goto fail;

    p->pool = mp_image_pool_new(f);
    mp_image_pool_set_allocator(p->pool, alloc_pool, f);
    mp_image_pool_set_lru(p->pool);

    mp_refqueue_add_in_format(p->queue, IMGFMT_D3D11, 0);

    mp_refqueue_set_refs(p->queue, 0, 0);
    mp_refqueue_set_mode(p->queue,
        (p->opts->deint_enabled ? MP_MODE_DEINT : 0) |
        MP_MODE_OUTPUT_FIELDS |
        (p->opts->interlaced_only ? MP_MODE_INTERLACED_ONLY : 0));

    return f;

fail:
    talloc_free(f);
    return NULL;
}

#define OPT_BASE_STRUCT struct opts
static const m_option_t vf_opts_fields[] = {
    OPT_FLAG("deint", deint_enabled, 0),
    OPT_FLAG("interlaced-only", interlaced_only, 0),
    OPT_CHOICE("mode", mode, 0,
        ({"blend", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND},
         {"bob", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB},
         {"adaptive", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE},
         {"mocomp", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION},
         {"ivctc", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_INVERSE_TELECINE},