/**
 * Release the Direct3D11 contexts held by the decoder's private state.
 *
 * Drops the ID3D11VideoContext and ID3D11DeviceContext references (if any)
 * that were acquired when the device was set up.
 */
static void D3dDestroyDevice(vlc_va_t *va)
{
    ID3D11VideoContext *vidctx = va->sys->d3dvidctx;
    ID3D11DeviceContext *devctx = va->sys->d3dctx;

    if (vidctx != NULL)
        ID3D11VideoContext_Release(vidctx);
    if (devctx != NULL)
        ID3D11DeviceContext_Release(devctx);
}
/*
 * Tear down all GL capture state and reset the global `data` struct so the
 * module can be re-initialized.
 *
 * Two cleanup paths mirror the two capture modes:
 *  - shared-texture (shtex): release the GL/DX interop object and interop
 *    device, the GL texture, the D3D11 texture/context/device/swapchain,
 *    and the helper window;
 *  - shared-memory: unmap (if still mapped) and delete each PBO plus its
 *    matching GL texture.
 * The FBO is common to both paths and is deleted last.
 */
static void gl_free(void)
{
    capture_free();

    if (data.using_shtex) {
        /* interop object must be unregistered before the interop device
         * is closed */
        if (data.gl_dxobj)
            jimglDXUnregisterObjectNV(data.gl_device, data.gl_dxobj);
        if (data.gl_device)
            jimglDXCloseDeviceNV(data.gl_device);
        if (data.texture)
            glDeleteTextures(1, &data.texture);
        if (data.d3d11_tex)
            ID3D11Resource_Release(data.d3d11_tex);
        if (data.d3d11_context)
            ID3D11DeviceContext_Release(data.d3d11_context);
        if (data.d3d11_device)
            ID3D11Device_Release(data.d3d11_device);
        if (data.dxgi_swap)
            IDXGISwapChain_Release(data.dxgi_swap);
        if (data.hwnd)
            DestroyWindow(data.hwnd);
    } else {
        for (size_t i = 0; i < NUM_BUFFERS; i++) {
            if (data.pbos[i]) {
                /* a still-mapped PBO has to be unmapped before deletion */
                if (data.texture_mapped[i]) {
                    glBindBuffer(GL_PIXEL_PACK_BUFFER, data.pbos[i]);
                    glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
                    glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
                }

                glDeleteBuffers(1, &data.pbos[i]);
            }

            if (data.textures[i])
                glDeleteTextures(1, &data.textures[i]);
        }
    }

    if (data.fbo)
        glDeleteFramebuffers(1, &data.fbo);

    /* log (and clear) any GL error raised during teardown */
    gl_error("gl_free", "GL error occurred on free");

    memset(&data, 0, sizeof(data));

    hlog("------------------ gl capture freed ------------------");
}
/*
 * Conformance test for D3D11CreateDevice: verify that every output
 * parameter (device, feature level, immediate context) is individually
 * optional, that the reported feature level matches the one the created
 * device exposes, and that the device/immediate-context reference counts
 * behave as expected.
 */
static void test_create_device(void)
{
    D3D_FEATURE_LEVEL feature_level, supported_feature_level;
    ID3D11DeviceContext *immediate_context = NULL;
    ID3D11Device *device;
    ULONG refcount;
    HRESULT hr;

    hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, NULL, 0,
            D3D11_SDK_VERSION, &device, NULL, NULL);
    if (FAILED(hr))
    {
        skip("Failed to create HAL device, skipping tests.\n");
        return;
    }

    supported_feature_level = ID3D11Device_GetFeatureLevel(device);
    ID3D11Device_Release(device);

    /* all output parameters may be NULL at once */
    hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, NULL, 0,
            D3D11_SDK_VERSION, NULL, NULL, NULL);
    ok(SUCCEEDED(hr), "D3D11CreateDevice failed %#x.\n", hr);

    /* requesting only the feature level must report the same level the
     * device itself exposed above */
    hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, NULL, 0,
            D3D11_SDK_VERSION, NULL, &feature_level, NULL);
    ok(SUCCEEDED(hr), "D3D11CreateDevice failed %#x.\n", hr);
    ok(feature_level == supported_feature_level,
            "Got feature level %#x, expected %#x.\n",
            feature_level, supported_feature_level);

    /* requesting only the immediate context */
    hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, NULL, 0,
            D3D11_SDK_VERSION, NULL, NULL, &immediate_context);
    ok(SUCCEEDED(hr), "D3D11CreateDevice failed %#x.\n", hr);
    todo_wine ok(!!immediate_context, "Immediate context is NULL.\n");
    if (!immediate_context)
        return;

    refcount = get_refcount((IUnknown *)immediate_context);
    ok(refcount == 1, "Got refcount %u, expected 1.\n", refcount);

    /* GetDevice returns a device reference owned by the caller */
    ID3D11DeviceContext_GetDevice(immediate_context, &device);
    refcount = ID3D11Device_Release(device);
    ok(refcount == 1, "Got refcount %u, expected 1.\n", refcount);

    refcount = ID3D11DeviceContext_Release(immediate_context);
    ok(!refcount, "ID3D11DeviceContext has %u references left.\n", refcount);
}
static int d3d11va_init_decoder(struct lavc_ctx *s, int w, int h) { HRESULT hr; int ret = -1; struct priv *p = s->hwdec_priv; TA_FREEP(&p->decoder); ID3D11Texture2D *texture = NULL; void *tmp = talloc_new(NULL); UINT n_guids = ID3D11VideoDevice_GetVideoDecoderProfileCount(p->video_dev); GUID *device_guids = talloc_array(tmp, GUID, n_guids); for (UINT i = 0; i < n_guids; i++) { GUID *guid = &device_guids[i]; hr = ID3D11VideoDevice_GetVideoDecoderProfile(p->video_dev, i, guid); if (FAILED(hr)) { MP_ERR(p, "Failed to get VideoDecoderProfile %d: %s\n", i, mp_HRESULT_to_str(hr)); goto done; } dump_decoder_info(s, guid); } struct d3d_decoder_fmt fmt = d3d_select_decoder_mode(s, device_guids, n_guids, d3d11_formats, MP_ARRAY_SIZE(d3d11_formats), d3d11_format_supported); if (!fmt.format) { MP_ERR(p, "Failed to find a suitable decoder\n"); goto done; } struct d3d11va_decoder *decoder = talloc_zero(tmp, struct d3d11va_decoder); talloc_set_destructor(decoder, d3d11va_destroy_decoder); decoder->mpfmt_decoded = fmt.format->mpfmt; int n_surfaces = hwdec_get_max_refs(s) + ADDITIONAL_SURFACES; int w_align = w, h_align = h; d3d_surface_align(s, &w_align, &h_align); D3D11_TEXTURE2D_DESC tex_desc = { .Width = w_align, .Height = h_align, .MipLevels = 1, .Format = fmt.format->dxfmt, .SampleDesc.Count = 1, .MiscFlags = 0, .ArraySize = n_surfaces, .Usage = D3D11_USAGE_DEFAULT, .BindFlags = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE, .CPUAccessFlags = 0, }; hr = ID3D11Device_CreateTexture2D(p->device, &tex_desc, NULL, &texture); if (FAILED(hr)) { MP_ERR(p, "Failed to create Direct3D11 texture with %d surfaces: %s\n", n_surfaces, mp_HRESULT_to_str(hr)); goto done; } if (s->hwdec->type == HWDEC_D3D11VA_COPY) { // create staging texture shared with the CPU with mostly the same // parameters as the above decoder-bound texture ID3D11Texture2D_GetDesc(texture, &tex_desc); tex_desc.MipLevels = 1; tex_desc.MiscFlags = 0; tex_desc.ArraySize = 1; tex_desc.Usage = D3D11_USAGE_STAGING; 
tex_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ; tex_desc.BindFlags = 0; hr = ID3D11Device_CreateTexture2D(p->device, &tex_desc, NULL, &decoder->staging); if (FAILED(hr)) { MP_ERR(p, "Failed to create staging texture: %s\n", mp_HRESULT_to_str(hr)); goto done; } } // pool to hold the mp_image wrapped surfaces decoder->pool = talloc_steal(decoder, mp_image_pool_new(n_surfaces)); // array of the same surfaces (needed by ffmpeg) ID3D11VideoDecoderOutputView **surfaces = talloc_array_ptrtype(decoder->pool, surfaces, n_surfaces); D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc = { .DecodeProfile = *fmt.guid, .ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D, }; for (int i = 0; i < n_surfaces; i++) { ID3D11VideoDecoderOutputView **surface = &surfaces[i]; view_desc.Texture2D.ArraySlice = i; hr = ID3D11VideoDevice_CreateVideoDecoderOutputView( p->video_dev, (ID3D11Resource *)texture, &view_desc, surface); if (FAILED(hr)) { MP_ERR(p, "Failed getting decoder output view %d: %s\n", i, mp_HRESULT_to_str(hr)); goto done; } struct mp_image *img = d3d11va_new_ref(*surface, w, h); ID3D11VideoDecoderOutputView_Release(*surface); // transferred to img if (!img) { MP_ERR(p, "Failed to create D3D11VA image %d\n", i); goto done; } mp_image_pool_add(decoder->pool, img); // transferred to pool } D3D11_VIDEO_DECODER_DESC decoder_desc = { .Guid = *fmt.guid, .SampleWidth = w, .SampleHeight = h, .OutputFormat = fmt.format->dxfmt, }; UINT n_cfg; hr = ID3D11VideoDevice_GetVideoDecoderConfigCount(p->video_dev, &decoder_desc, &n_cfg); if (FAILED(hr)) { MP_ERR(p, "Failed to get number of decoder configurations: %s)", mp_HRESULT_to_str(hr)); goto done; } // pick the config with the highest score D3D11_VIDEO_DECODER_CONFIG *decoder_config = talloc_zero(decoder, D3D11_VIDEO_DECODER_CONFIG); unsigned max_score = 0; for (UINT i = 0; i < n_cfg; i++) { D3D11_VIDEO_DECODER_CONFIG cfg; hr = ID3D11VideoDevice_GetVideoDecoderConfig(p->video_dev, &decoder_desc, i, &cfg); if (FAILED(hr)) { MP_ERR(p, "Failed to 
get decoder config %d: %s\n", i, mp_HRESULT_to_str(hr)); goto done; } unsigned score = d3d_decoder_config_score( s, &cfg.guidConfigBitstreamEncryption, cfg.ConfigBitstreamRaw); if (score > max_score) { max_score = score; *decoder_config = cfg; } } if (!max_score) { MP_ERR(p, "Failed to find a suitable decoder configuration\n"); goto done; } hr = ID3D11VideoDevice_CreateVideoDecoder(p->video_dev, &decoder_desc, decoder_config, &decoder->decoder); if (FAILED(hr)) { MP_ERR(p, "Failed to create video decoder: %s\n", mp_HRESULT_to_str(hr)); goto done; } struct AVD3D11VAContext *avd3d11va_ctx = s->avctx->hwaccel_context; avd3d11va_ctx->decoder = decoder->decoder; avd3d11va_ctx->video_context = p->video_ctx; avd3d11va_ctx->cfg = decoder_config; avd3d11va_ctx->surface_count = n_surfaces; avd3d11va_ctx->surface = surfaces; avd3d11va_ctx->workaround = is_clearvideo(fmt.guid) ? FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO : 0; p->decoder = talloc_steal(NULL, decoder); ret = 0; done: // still referenced by pool images / surfaces if (texture) ID3D11Texture2D_Release(texture); talloc_free(tmp); return ret; } static void destroy_device(struct lavc_ctx *s) { struct priv *p = s->hwdec_priv; if (p->device) ID3D11Device_Release(p->device); if (p->device_ctx) ID3D11DeviceContext_Release(p->device_ctx); } static bool create_device(struct lavc_ctx *s, BOOL thread_safe) { HRESULT hr; struct priv *p = s->hwdec_priv; d3d_load_dlls(); if (!d3d11_dll) { MP_ERR(p, "Failed to load D3D11 library\n"); return false; } PFN_D3D11_CREATE_DEVICE CreateDevice = (void *)GetProcAddress(d3d11_dll, "D3D11CreateDevice"); if (!CreateDevice) { MP_ERR(p, "Failed to get D3D11CreateDevice symbol from DLL: %s\n", mp_LastError_to_str()); return false; } hr = CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0, D3D11_SDK_VERSION, &p->device, NULL, &p->device_ctx); if (FAILED(hr)) { MP_ERR(p, "Failed to create D3D11 Device: %s\n", mp_HRESULT_to_str(hr)); return false; } 
ID3D10Multithread *multithread; hr = ID3D11Device_QueryInterface(p->device, &IID_ID3D10Multithread, (void **)&multithread); if (FAILED(hr)) { MP_ERR(p, "Failed to get Multithread interface: %s\n", mp_HRESULT_to_str(hr)); return false; } ID3D10Multithread_SetMultithreadProtected(multithread, thread_safe); ID3D10Multithread_Release(multithread); return true; } static void d3d11va_uninit(struct lavc_ctx *s) { struct priv *p = s->hwdec_priv; if (!p) return; talloc_free(p->decoder); av_freep(&s->avctx->hwaccel_context); if (p->video_dev) ID3D11VideoDevice_Release(p->video_dev); if (p->video_ctx) ID3D11VideoContext_Release(p->video_ctx); destroy_device(s); TA_FREEP(&s->hwdec_priv); } static int d3d11va_init(struct lavc_ctx *s) { HRESULT hr; struct priv *p = talloc_zero(NULL, struct priv); if (!p) return -1; s->hwdec_priv = p; p->log = mp_log_new(s, s->log, "d3d11va"); if (s->hwdec->type == HWDEC_D3D11VA_COPY) { mp_check_gpu_memcpy(p->log, NULL); p->sw_pool = talloc_steal(p, mp_image_pool_new(17)); } p->device = hwdec_devices_load(s->hwdec_devs, s->hwdec->type); if (p->device) { ID3D11Device_AddRef(p->device); ID3D11Device_GetImmediateContext(p->device, &p->device_ctx); if (!p->device_ctx) goto fail; MP_VERBOSE(p, "Using VO-supplied device %p.\n", p->device); } else if (s->hwdec->type == HWDEC_D3D11VA) { MP_ERR(p, "No Direct3D device provided for native d3d11 decoding\n"); goto fail; } else { if (!create_device(s, FALSE)) goto fail; } hr = ID3D11DeviceContext_QueryInterface(p->device_ctx, &IID_ID3D11VideoContext, (void **)&p->video_ctx); if (FAILED(hr)) { MP_ERR(p, "Failed to get VideoContext interface: %s\n", mp_HRESULT_to_str(hr)); goto fail; } hr = ID3D11Device_QueryInterface(p->device, &IID_ID3D11VideoDevice, (void **)&p->video_dev); if (FAILED(hr)) { MP_ERR(p, "Failed to get VideoDevice interface. 
%s\n", mp_HRESULT_to_str(hr)); goto fail; } s->avctx->hwaccel_context = av_d3d11va_alloc_context(); if (!s->avctx->hwaccel_context) { MP_ERR(p, "Failed to allocate hwaccel_context\n"); goto fail; } return 0; fail: d3d11va_uninit(s); return -1; } static int d3d11va_probe(struct lavc_ctx *ctx, struct vd_lavc_hwdec *hwdec, const char *codec) { // d3d11va-copy can do without external context; dxva2 requires it. if (hwdec->type != HWDEC_D3D11VA_COPY) { if (!hwdec_devices_load(ctx->hwdec_devs, HWDEC_D3D11VA)) return HWDEC_ERR_NO_CTX; } return d3d_probe_codec(codec); } const struct vd_lavc_hwdec mp_vd_lavc_d3d11va = { .type = HWDEC_D3D11VA, .image_format = IMGFMT_D3D11VA, .probe = d3d11va_probe, .init = d3d11va_init, .uninit = d3d11va_uninit, .init_decoder = d3d11va_init_decoder, .allocate_image = d3d11va_allocate_image, .process_image = d3d11va_update_image_attribs, }; const struct vd_lavc_hwdec mp_vd_lavc_d3d11va_copy = { .type = HWDEC_D3D11VA_COPY, .copying = true, .image_format = IMGFMT_D3D11VA, .probe = d3d11va_probe, .init = d3d11va_init, .uninit = d3d11va_uninit, .init_decoder = d3d11va_init_decoder, .allocate_image = d3d11va_allocate_image, .process_image = d3d11va_retrieve_image, .delay_queue = HWDEC_DELAY_QUEUE_COUNT, };
/*
 * (Re)create the D3D11 video processor for the current input size and frame
 * format: enumerate processors, pick the first rate-conversion caps entry
 * matching the configured deinterlace mode (falling back to #0), and apply
 * source rect, auto-processing, output rate and colorspace settings.
 * Returns 0 on success, -1 on failure (processor state is destroyed).
 */
static int recreate_video_proc(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;
    HRESULT hr;

    destroy_video_proc(vf);

    D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
        .InputFrameFormat = p->d3d_frame_format,
        .InputWidth = p->c_w,
        .InputHeight = p->c_h,
        .OutputWidth = p->params.w,
        .OutputHeight = p->params.h,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(p->video_dev,
                                                          &vpdesc, &p->vp_enum);
    if (FAILED(hr))
        goto fail;

    D3D11_VIDEO_PROCESSOR_CAPS caps;
    hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorCaps(p->vp_enum, &caps);
    if (FAILED(hr))
        goto fail;

    MP_VERBOSE(vf, "Found %d rate conversion caps. Looking for caps=0x%x.\n",
               (int)caps.RateConversionCapsCount, p->mode);

    /* first matching entry wins */
    int rindex = -1;
    for (int n = 0; n < caps.RateConversionCapsCount; n++) {
        D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rcaps;
        hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorRateConversionCaps
                (p->vp_enum, n, &rcaps);
        if (FAILED(hr))
            goto fail;
        MP_VERBOSE(vf, " - %d: 0x%08x\n", n, (unsigned)rcaps.ProcessorCaps);
        if (rcaps.ProcessorCaps & p->mode) {
            MP_VERBOSE(vf, " (matching)\n");
            if (rindex < 0)
                rindex = n;
        }
    }

    if (rindex < 0) {
        MP_WARN(vf, "No fitting video processor found, picking #0.\n");
        rindex = 0;
    }

    // TODO: so, how do we select which rate conversion mode the processor uses?

    hr = ID3D11VideoDevice_CreateVideoProcessor(p->video_dev, p->vp_enum,
                                                rindex, &p->video_proc);
    if (FAILED(hr)) {
        MP_ERR(vf, "Failed to create D3D11 video processor.\n");
        goto fail;
    }

    // Note: libavcodec does not support cropping left/top with hwaccel.
    RECT src_rc = {
        .right = p->params.w,
        .bottom = p->params.h,
    };
    ID3D11VideoContext_VideoProcessorSetStreamSourceRect(p->video_ctx,
                                                         p->video_proc,
                                                         0, TRUE, &src_rc);

    // This is supposed to stop drivers from f*****g up the video quality.
    ID3D11VideoContext_VideoProcessorSetStreamAutoProcessingMode(p->video_ctx,
                                                                 p->video_proc,
                                                                 0, FALSE);

    ID3D11VideoContext_VideoProcessorSetStreamOutputRate(p->video_ctx,
                                                         p->video_proc,
                                                         0,
                                                         D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL,
                                                         FALSE, 0);

    D3D11_VIDEO_PROCESSOR_COLOR_SPACE csp = {
        .YCbCr_Matrix = p->params.color.space != MP_CSP_BT_601,
        .Nominal_Range = p->params.color.levels == MP_CSP_LEVELS_TV ? 1 : 2,
    };
    ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
                                                         p->video_proc,
                                                         0, &csp);
    if (p->out_rgb) {
        if (p->params.color.space != MP_CSP_BT_601 &&
            p->params.color.space != MP_CSP_BT_709)
        {
            MP_WARN(vf, "Unsupported video colorspace (%s/%s). Consider "
                        "disabling hardware decoding, or using "
                        "--hwdec=d3d11va-copy to get correct output.\n",
                    m_opt_choice_str(mp_csp_names, p->params.color.space),
                    m_opt_choice_str(mp_csp_levels_names, p->params.color.levels));
        }
    } else {
        ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
                                                             p->video_proc,
                                                             &csp);
    }

    return 0;
fail:
    destroy_video_proc(vf);
    return -1;
}

/*
 * Run one frame (or field) through the video processor and queue the result
 * as an output frame. Recreates the processor if the input texture size or
 * frame format changed. Returns 0 on success, -1 on failure.
 */
static int render(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;
    int res = -1;
    HRESULT hr;
    ID3D11VideoProcessorInputView *in_view = NULL;
    ID3D11VideoProcessorOutputView *out_view = NULL;
    struct mp_image *in = NULL, *out = NULL;
    out = mp_image_pool_get(p->pool, p->out_params.imgfmt, p->params.w,
                            p->params.h);
    if (!out)
        goto cleanup;

    /* planes[1] holds the texture, planes[2] the array-slice index */
    ID3D11Texture2D *d3d_out_tex = (void *)out->planes[1];

    in = mp_refqueue_get(p->queue, 0);
    if (!in)
        goto cleanup;
    ID3D11Texture2D *d3d_tex = (void *)in->planes[1];
    int d3d_subindex = (intptr_t)in->planes[2];

    mp_image_copy_attributes(out, in);

    D3D11_VIDEO_FRAME_FORMAT d3d_frame_format;
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_top_field_first(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);
    /* lazily (re)create the processor when geometry or interlacing changes */
    if (!p->video_proc || p->c_w != texdesc.Width || p->c_h != texdesc.Height ||
        p->d3d_frame_format != d3d_frame_format)
    {
        p->c_w = texdesc.Width;
        p->c_h = texdesc.Height;
        p->d3d_frame_format = d3d_frame_format;
        if (recreate_video_proc(vf) < 0)
            goto cleanup;
    }

    /* per-field format: uses is_top_field here, unlike the check above */
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_is_top_field(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    ID3D11VideoContext_VideoProcessorSetStreamFrameFormat(p->video_ctx,
                                                          p->video_proc,
                                                          0, d3d_frame_format);

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC indesc = {
        .ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
        .Texture2D = {
            .ArraySlice = d3d_subindex,
        },
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorInputView(p->video_dev,
                                                         (ID3D11Resource *)d3d_tex,
                                                         p->vp_enum, &indesc,
                                                         &in_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorInputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outdesc = {
        .ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(p->video_dev,
                                                          (ID3D11Resource *)d3d_out_tex,
                                                          p->vp_enum, &outdesc,
                                                          &out_view);
    if (FAILED(hr))
        goto cleanup;

    D3D11_VIDEO_PROCESSOR_STREAM stream = {
        .Enable = TRUE,
        .pInputSurface = in_view,
    };
    int frame = mp_refqueue_is_second_field(p->queue);
    hr = ID3D11VideoContext_VideoProcessorBlt(p->video_ctx, p->video_proc,
                                              out_view, frame, 1, &stream);
    if (FAILED(hr)) {
        MP_ERR(vf, "VideoProcessorBlt failed.\n");
        goto cleanup;
    }

    res = 0;
cleanup:
    if (in_view)
        ID3D11VideoProcessorInputView_Release(in_view);
    if (out_view)
        ID3D11VideoProcessorOutputView_Release(out_view);
    if (res >= 0) {
        vf_add_output_frame(vf, out);
    } else {
        talloc_free(out);
    }
    mp_refqueue_next_field(p->queue);
    return res;
}

/*
 * Output one frame: pass through untouched when no deinterlacing or format
 * conversion is needed, otherwise run the processor via render().
 */
static int filter_out(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;

    if (!mp_refqueue_has_output(p->queue))
        return 0;

    // no filtering
    if (!mp_refqueue_should_deint(p->queue) && !p->require_filtering) {
        struct mp_image *in = mp_image_new_ref(mp_refqueue_get(p->queue, 0));
        if (!in)
            return -1;
        mp_image_set_params(in, &p->out_params);
        vf_add_output_frame(vf, in);
        mp_refqueue_next(p->queue);
        return 0;
    }

    return render(vf);
}

/*
 * Negotiate the output format with the next filter (prefer native/NV12,
 * fall back to RGB), reset pools and processor state, and record whether
 * an actual conversion pass is required.
 */
static int reconfig(struct vf_instance *vf, struct mp_image_params *in,
                    struct mp_image_params *out)
{
    struct vf_priv_s *p = vf->priv;

    flush_frames(vf);
    talloc_free(p->pool);
    p->pool = NULL;

    destroy_video_proc(vf);

    *out = *in;

    if (vf_next_query_format(vf, IMGFMT_D3D11VA) ||
        vf_next_query_format(vf, IMGFMT_D3D11NV12))
    {
        out->imgfmt = vf_next_query_format(vf, IMGFMT_D3D11VA)
                    ? IMGFMT_D3D11VA : IMGFMT_D3D11NV12;
        out->hw_subfmt = IMGFMT_NV12;
        p->out_format = DXGI_FORMAT_NV12;
        p->out_shared = false;
        p->out_rgb = false;
    } else {
        out->imgfmt = IMGFMT_D3D11RGB;
        out->hw_subfmt = IMGFMT_RGB0;
        p->out_format = DXGI_FORMAT_B8G8R8A8_UNORM;
        p->out_shared = true;
        p->out_rgb = true;
    }

    /* a subfmt change means we must run the processor even when not
     * deinterlacing */
    p->require_filtering = in->hw_subfmt != out->hw_subfmt;

    p->params = *in;
    p->out_params = *out;

    p->pool = mp_image_pool_new(20);
    mp_image_pool_set_allocator(p->pool, alloc_pool, vf);
    mp_image_pool_set_lru(p->pool);

    return 0;
}

/* Free all filter state and drop the D3D11 interface references. */
static void uninit(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;

    destroy_video_proc(vf);
    flush_frames(vf);
    mp_refqueue_free(p->queue);
    talloc_free(p->pool);

    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);
    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);
    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);
    if (p->vo_dev)
        ID3D11Device_Release(p->vo_dev);
}

/* Accept any D3D11 image format the next filter can also take. */
static int query_format(struct vf_instance *vf, unsigned int imgfmt)
{
    if (imgfmt == IMGFMT_D3D11VA || imgfmt == IMGFMT_D3D11NV12 ||
        imgfmt == IMGFMT_D3D11RGB)
    {
        return vf_next_query_format(vf, IMGFMT_D3D11VA) ||
               vf_next_query_format(vf, IMGFMT_D3D11NV12) ||
               vf_next_query_format(vf, IMGFMT_D3D11RGB);
    }
    return 0;
}
/* True when both formats are D3D11 variants this filter can convert between. */
static bool test_conversion(int in, int out)
{
    return (in == IMGFMT_D3D11VA || in == IMGFMT_D3D11NV12 ||
            in == IMGFMT_D3D11RGB) &&
           (out == IMGFMT_D3D11VA || out == IMGFMT_D3D11NV12 ||
            out == IMGFMT_D3D11RGB);
}

/* Filter control: get/set deinterlacing, flush on seek. */
static int control(struct vf_instance *vf, int request, void* data)
{
    struct vf_priv_s *p = vf->priv;
    switch (request){
    case VFCTRL_GET_DEINTERLACE:
        *(int*)data = !!p->deint_enabled;
        return true;
    case VFCTRL_SET_DEINTERLACE:
        p->deint_enabled = !!*(int*)data;
        return true;
    case VFCTRL_SEEK_RESET:
        flush_frames(vf);
        return true;
    default:
        return CONTROL_UNKNOWN;
    }
}

/*
 * Filter entry point: wire up the vf callbacks, then acquire the VO's D3D11
 * device and query the video device/context interfaces needed for the video
 * processor. Returns 1 on success, 0 on failure (state cleaned via uninit).
 */
static int vf_open(vf_instance_t *vf)
{
    struct vf_priv_s *p = vf->priv;

    vf->reconfig = reconfig;
    vf->filter_ext = filter_ext;
    vf->filter_out = filter_out;
    vf->query_format = query_format;
    vf->uninit = uninit;
    vf->control = control;

    p->queue = mp_refqueue_alloc();

    p->vo_dev = hwdec_devices_load(vf->hwdec_devs, HWDEC_D3D11VA);
    if (!p->vo_dev)
        return 0;

    ID3D11Device_AddRef(p->vo_dev);

    HRESULT hr;
    hr = ID3D11Device_QueryInterface(p->vo_dev, &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr))
        goto fail;

    ID3D11Device_GetImmediateContext(p->vo_dev, &p->device_ctx);
    if (!p->device_ctx)
        goto fail;
    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx,
                                            &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr))
        goto fail;

    return 1;
fail:
    uninit(vf);
    return 0;
}

#define OPT_BASE_STRUCT struct vf_priv_s
/* NOTE(review): this option table is truncated in the visible chunk; the
 * remainder continues past the end of this view. */
static const m_option_t vf_opts_fields[] = {
    OPT_FLAG("deint", deint_enabled, 0),
    OPT_FLAG("interlaced-only", interlaced_only, 0),
    OPT_CHOICE("mode", mode, 0,
               ({"blend", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND},
                {"bob", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB},
                {"adaptive", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE},
                {"mocomp", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION},
int main(void) { struct nk_context *ctx; struct nk_color background; WNDCLASSW wc; RECT rect = { 0, 0, WINDOW_WIDTH, WINDOW_HEIGHT }; DWORD style = WS_OVERLAPPEDWINDOW; DWORD exstyle = WS_EX_APPWINDOW; HWND wnd; int running = 1; HRESULT hr; D3D_FEATURE_LEVEL feature_level; DXGI_SWAP_CHAIN_DESC swap_chain_desc; /* Win32 */ memset(&wc, 0, sizeof(wc)); wc.lpfnWndProc = WindowProc; wc.hInstance = GetModuleHandleW(0); wc.hIcon = LoadIcon(NULL, IDI_APPLICATION); wc.hCursor = LoadCursor(NULL, IDC_ARROW); wc.lpszClassName = L"NuklearWindowClass"; RegisterClassW(&wc); AdjustWindowRectEx(&rect, style, FALSE, exstyle); wnd = CreateWindowExW(exstyle, wc.lpszClassName, L"Nuklear Demo", style | WS_VISIBLE, CW_USEDEFAULT, CW_USEDEFAULT, rect.right - rect.left, rect.bottom - rect.top, NULL, NULL, wc.hInstance, NULL); /* D3D11 setup */ memset(&swap_chain_desc, 0, sizeof(swap_chain_desc)); swap_chain_desc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; swap_chain_desc.BufferDesc.RefreshRate.Numerator = 60; swap_chain_desc.BufferDesc.RefreshRate.Denominator = 1; swap_chain_desc.SampleDesc.Count = 1; swap_chain_desc.SampleDesc.Quality = 0; swap_chain_desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; swap_chain_desc.BufferCount = 1; swap_chain_desc.OutputWindow = wnd; swap_chain_desc.Windowed = TRUE; swap_chain_desc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD; swap_chain_desc.Flags = 0; if (FAILED(D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, NULL, 0, D3D11_SDK_VERSION, &swap_chain_desc, &swap_chain, &device, &feature_level, &context))) { /* if hardware device fails, then try WARP high-performance software rasterizer, this is useful for RDP sessions */ hr = D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_WARP, NULL, 0, NULL, 0, D3D11_SDK_VERSION, &swap_chain_desc, &swap_chain, &device, &feature_level, &context); assert(SUCCEEDED(hr)); } set_swap_chain_size(WINDOW_WIDTH, WINDOW_HEIGHT); /* GUI */ ctx = nk_d3d11_init(device, WINDOW_WIDTH, WINDOW_HEIGHT, 
MAX_VERTEX_BUFFER, MAX_INDEX_BUFFER); /* Load Fonts: if none of these are loaded a default font will be used */ {struct nk_font_atlas *atlas; nk_d3d11_font_stash_begin(&atlas); /*struct nk_font *droid = nk_font_atlas_add_from_file(atlas, "../../../extra_font/DroidSans.ttf", 14, 0);*/ /*struct nk_font *robot = nk_font_atlas_add_from_file(atlas, "../../../extra_font/Robot-Regular.ttf", 14, 0);*/ /*struct nk_font *future = nk_font_atlas_add_from_file(atlas, "../../../extra_font/kenvector_future_thin.ttf", 13, 0);*/ /*struct nk_font *clean = nk_font_atlas_add_from_file(atlas, "../../../extra_font/ProggyClean.ttf", 12, 0);*/ /*struct nk_font *tiny = nk_font_atlas_add_from_file(atlas, "../../../extra_font/ProggyTiny.ttf", 10, 0);*/ /*struct nk_font *cousine = nk_font_atlas_add_from_file(atlas, "../../../extra_font/Cousine-Regular.ttf", 13, 0);*/ nk_d3d11_font_stash_end(); /*nk_style_set_font(ctx, &droid->handle)*/;} /* style.c */ /*set_style(ctx, THEME_WHITE);*/ /*set_style(ctx, THEME_RED);*/ /*set_style(ctx, THEME_BLUE);*/ /*set_style(ctx, THEME_DARK);*/ background = nk_rgb(28,48,62); while (running) { /* Input */ MSG msg; nk_input_begin(ctx); while (PeekMessageW(&msg, NULL, 0, 0, PM_REMOVE)) { if (msg.message == WM_QUIT) running = 0; TranslateMessage(&msg); DispatchMessageW(&msg); } nk_input_end(ctx); /* GUI */ {struct nk_panel layout; if (nk_begin(ctx, &layout, "Demo", nk_rect(50, 50, 230, 250), NK_WINDOW_BORDER|NK_WINDOW_MOVABLE|NK_WINDOW_SCALABLE| NK_WINDOW_MINIMIZABLE|NK_WINDOW_TITLE)) { enum {EASY, HARD}; static int op = EASY; static int property = 20; nk_layout_row_static(ctx, 30, 80, 1); if (nk_button_label(ctx, "button", NK_BUTTON_DEFAULT)) fprintf(stdout, "button pressed\n"); nk_layout_row_dynamic(ctx, 30, 2); if (nk_option_label(ctx, "easy", op == EASY)) op = EASY; if (nk_option_label(ctx, "hard", op == HARD)) op = HARD; nk_layout_row_dynamic(ctx, 22, 1); nk_property_int(ctx, "Compression:", 0, &property, 100, 10, 1); {struct nk_panel combo; 
nk_layout_row_dynamic(ctx, 20, 1); nk_label(ctx, "background:", NK_TEXT_LEFT); nk_layout_row_dynamic(ctx, 25, 1); if (nk_combo_begin_color(ctx, &combo, background, 400)) { nk_layout_row_dynamic(ctx, 120, 1); background = nk_color_picker(ctx, background, NK_RGBA); nk_layout_row_dynamic(ctx, 25, 1); background.r = (nk_byte)nk_propertyi(ctx, "#R:", 0, background.r, 255, 1,1); background.g = (nk_byte)nk_propertyi(ctx, "#G:", 0, background.g, 255, 1,1); background.b = (nk_byte)nk_propertyi(ctx, "#B:", 0, background.b, 255, 1,1); background.a = (nk_byte)nk_propertyi(ctx, "#A:", 0, background.a, 255, 1,1); nk_combo_end(ctx); }} } nk_end(ctx);} if (nk_window_is_closed(ctx, "Demo")) break; /* -------------- EXAMPLES ---------------- */ /*calculator(ctx);*/ /*overview(ctx);*/ /*node_editor(ctx);*/ /* ----------------------------------------- */ {/* Draw */ float bg[4]; nk_color_fv(bg, background); ID3D11DeviceContext_ClearRenderTargetView(context, rt_view, bg); ID3D11DeviceContext_OMSetRenderTargets(context, 1, &rt_view, NULL); nk_d3d11_render(context, NK_ANTI_ALIASING_ON); hr = IDXGISwapChain_Present(swap_chain, 1, 0); if (hr == DXGI_ERROR_DEVICE_RESET || hr == DXGI_ERROR_DEVICE_REMOVED) { /* to recover from this, you'll need to recreate device and all the resources */ MessageBoxW(NULL, L"D3D11 device is lost or removed!", L"Error", 0); break; } else if (hr == DXGI_STATUS_OCCLUDED) { /* window is not visible, so vsync won't work. Let's sleep a bit to reduce CPU usage */ Sleep(10); } assert(SUCCEEDED(hr));} } ID3D11DeviceContext_ClearState(context); nk_d3d11_shutdown(); ID3D11ShaderResourceView_Release(rt_view); ID3D11DeviceContext_Release(context); ID3D11Device_Release(device); IDXGISwapChain_Release(swap_chain); UnregisterClassW(wc.lpszClassName, wc.hInstance); return 0; }
// (Re)create the D3D11 video processor to match the current input
// size and frame format. Any existing processor state is destroyed
// first. Returns 0 on success, -1 on failure (state is destroyed on
// the failure path as well).
static int recreate_video_proc(struct mp_filter *vf)
{
    struct priv *p = vf->priv;
    HRESULT hr;

    destroy_video_proc(vf);

    // Describe the conversion: decoder surface size -> output size.
    D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
        .InputFrameFormat = p->d3d_frame_format,
        .InputWidth = p->c_w,
        .InputHeight = p->c_h,
        .OutputWidth = p->params.w,
        .OutputHeight = p->params.h,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(p->video_dev, &vpdesc,
                                                          &p->vp_enum);
    if (FAILED(hr))
        goto fail;

    D3D11_VIDEO_PROCESSOR_CAPS caps;
    hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorCaps(p->vp_enum, &caps);
    if (FAILED(hr))
        goto fail;

    MP_VERBOSE(vf, "Found %d rate conversion caps. Looking for caps=0x%x.\n",
               (int)caps.RateConversionCapsCount, p->opts->mode);

    // Pick the first rate converter whose ProcessorCaps bitmask includes
    // the user-requested deinterlace mode (p->opts->mode).
    int rindex = -1;
    for (int n = 0; n < caps.RateConversionCapsCount; n++) {
        D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rcaps;
        hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorRateConversionCaps
                (p->vp_enum, n, &rcaps);
        if (FAILED(hr))
            goto fail;
        MP_VERBOSE(vf, " - %d: 0x%08x\n", n, (unsigned)rcaps.ProcessorCaps);
        if (rcaps.ProcessorCaps & p->opts->mode) {
            MP_VERBOSE(vf, " (matching)\n");
            if (rindex < 0)
                rindex = n;
        }
    }

    // Fall back to converter #0 rather than failing hard.
    if (rindex < 0) {
        MP_WARN(vf, "No fitting video processor found, picking #0.\n");
        rindex = 0;
    }

    // TODO: so, how do we select which rate conversion mode the processor uses?
    hr = ID3D11VideoDevice_CreateVideoProcessor(p->video_dev, p->vp_enum, rindex,
                                                &p->video_proc);
    if (FAILED(hr)) {
        MP_ERR(vf, "Failed to create D3D11 video processor.\n");
        goto fail;
    }

    // Note: libavcodec does not support cropping left/top with hwaccel.
    RECT src_rc = {
        .right = p->params.w,
        .bottom = p->params.h,
    };
    ID3D11VideoContext_VideoProcessorSetStreamSourceRect(p->video_ctx,
                                                         p->video_proc,
                                                         0, TRUE, &src_rc);

    // This is supposed to stop drivers from f*****g up the video quality.
    ID3D11VideoContext_VideoProcessorSetStreamAutoProcessingMode(p->video_ctx,
                                                                 p->video_proc,
                                                                 0, FALSE);

    ID3D11VideoContext_VideoProcessorSetStreamOutputRate(p->video_ctx,
            p->video_proc, 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL,
            FALSE, 0);

    // Map mpv colorspace/levels onto the D3D11 stream colorspace.
    // NOTE(review): anything that is not BT.601 is treated as a
    // BT.709-style YCbCr matrix; non-TV levels map to Nominal_Range 2.
    D3D11_VIDEO_PROCESSOR_COLOR_SPACE csp = {
        .YCbCr_Matrix = p->params.color.space != MP_CSP_BT_601,
        .Nominal_Range = p->params.color.levels == MP_CSP_LEVELS_TV ? 1 : 2,
    };
    ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
                                                         p->video_proc,
                                                         0, &csp);
    if (p->out_rgb) {
        // RGB output: the processor does the conversion itself; warn when
        // the source colorspace is one it cannot represent correctly.
        if (p->params.color.space != MP_CSP_BT_601 &&
            p->params.color.space != MP_CSP_BT_709)
        {
            MP_WARN(vf, "Unsupported video colorspace (%s/%s). Consider "
                    "disabling hardware decoding, or using "
                    "--hwdec=d3d11va-copy to get correct output.\n",
                    m_opt_choice_str(mp_csp_names, p->params.color.space),
                    m_opt_choice_str(mp_csp_levels_names, p->params.color.levels));
        }
    } else {
        ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
                                                             p->video_proc,
                                                             &csp);
    }

    return 0;
fail:
    destroy_video_proc(vf);
    return -1;
}

// Run the current queued frame (or field) through the video processor and
// return a new output image from the pool, or NULL on failure.
static struct mp_image *render(struct mp_filter *vf)
{
    struct priv *p = vf->priv;
    int res = -1;
    HRESULT hr;
    ID3D11VideoProcessorInputView *in_view = NULL;
    ID3D11VideoProcessorOutputView *out_view = NULL;
    struct mp_image *in = NULL, *out = NULL;

    out = mp_image_pool_get(p->pool, IMGFMT_D3D11, p->params.w, p->params.h);
    if (!out) {
        MP_WARN(vf, "failed to allocate frame\n");
        goto cleanup;
    }

    ID3D11Texture2D *d3d_out_tex = (void *)out->planes[0];

    in = mp_refqueue_get(p->queue, 0);
    if (!in)
        goto cleanup;
    // For IMGFMT_D3D11: planes[0] is the texture, planes[1] carries the
    // texture array slice (subresource index).
    ID3D11Texture2D *d3d_tex = (void *)in->planes[0];
    int d3d_subindex = (intptr_t)in->planes[1];

    mp_image_copy_attributes(out, in);

    // Frame format used to decide whether the processor must be recreated:
    // distinguishes progressive vs. TFF/BFF interlaced input only.
    D3D11_VIDEO_FRAME_FORMAT d3d_frame_format;
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_top_field_first(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format =
            D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);

    // Lazily (re)create the processor when the input texture geometry or
    // the interlacing mode changed since the last frame.
    if (!p->video_proc || p->c_w != texdesc.Width || p->c_h != texdesc.Height ||
        p->d3d_frame_format != d3d_frame_format)
    {
        p->c_w = texdesc.Width;
        p->c_h = texdesc.Height;
        p->d3d_frame_format = d3d_frame_format;
        if (recreate_video_proc(vf) < 0)
            goto cleanup;
    }

    // Per-frame stream format. Note this intentionally uses
    // mp_refqueue_is_top_field() (field of the *current* output), unlike
    // the recreate check above which uses mp_refqueue_top_field_first().
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_is_top_field(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format =
            D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    ID3D11VideoContext_VideoProcessorSetStreamFrameFormat(p->video_ctx,
                                                          p->video_proc,
                                                          0, d3d_frame_format);

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC indesc = {
        .ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
        .Texture2D = {
            .ArraySlice = d3d_subindex,
        },
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorInputView(p->video_dev,
                                                         (ID3D11Resource *)d3d_tex,
                                                         p->vp_enum, &indesc,
                                                         &in_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorInputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outdesc = {
        .ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(p->video_dev,
                                                          (ID3D11Resource *)d3d_out_tex,
                                                          p->vp_enum, &outdesc,
                                                          &out_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorOutputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_STREAM stream = {
        .Enable = TRUE,
        .pInputSurface = in_view,
    };
    // When outputting fields, the second field of a frame gets frame index 1.
    int frame = mp_refqueue_is_second_field(p->queue);
    hr = ID3D11VideoContext_VideoProcessorBlt(p->video_ctx, p->video_proc,
                                              out_view, frame, 1, &stream);
    if (FAILED(hr)) {
        MP_ERR(vf, "VideoProcessorBlt failed.\n");
        goto cleanup;
    }

    res = 0;
cleanup:
    if (in_view)
        ID3D11VideoProcessorInputView_Release(in_view);
    if (out_view)
        ID3D11VideoProcessorOutputView_Release(out_view);
    // On failure, free the output image and return NULL.
    if (res < 0)
        TA_FREEP(&out);
    return out;
}

// Returns true if the VO advertises support for the given IMGFMT_D3D11
// hardware subformat (p->vo_formats is a 0-terminated list; may be NULL).
static bool vo_supports(struct priv *p, int subfmt)
{
    for (int n = 0; p->vo_formats && p->vo_formats[n]; n++) {
        if (p->vo_formats[n] == subfmt)
            return true;
    }
    return false;
}

// Filter process callback: handles input format reinit, then either passes
// frames through untouched or runs them through the video processor.
static void vf_d3d11vpp_process(struct mp_filter *vf)
{
    struct priv *p = vf->priv;

    struct mp_image *in_fmt = mp_refqueue_execute_reinit(p->queue);
    if (in_fmt) {
        // Input format changed: drop pooled surfaces and the processor,
        // then pick the output subformat (prefer NV12; fall back to RGB0
        // with a shared texture when the VO can't take NV12).
        mp_image_pool_clear(p->pool);
        destroy_video_proc(vf);

        p->params = in_fmt->params;
        p->out_params = p->params;

        if (vo_supports(p, IMGFMT_NV12)) {
            p->out_params.hw_subfmt = IMGFMT_NV12;
            p->out_format = DXGI_FORMAT_NV12;
            p->out_shared = false;
            p->out_rgb = false;
        } else {
            p->out_params.hw_subfmt = IMGFMT_RGB0;
            p->out_format = DXGI_FORMAT_B8G8R8A8_UNORM;
            p->out_shared = true;
            p->out_rgb = true;
        }
        p->out_params.hw_flags = 0;

        // Filtering is required whenever a subformat conversion happens.
        p->require_filtering = p->params.hw_subfmt != p->out_params.hw_subfmt;
    }

    if (!mp_refqueue_can_output(p->queue))
        return;

    if (!mp_refqueue_should_deint(p->queue) && !p->require_filtering) {
        // no filtering
        struct mp_image *in = mp_image_new_ref(mp_refqueue_get(p->queue, 0));
        if (!in) {
            mp_filter_internal_mark_failed(vf);
            return;
        }
        mp_refqueue_write_out_pin(p->queue, in);
    } else {
        mp_refqueue_write_out_pin(p->queue, render(vf));
    }
}

// Filter destroy callback: tears down processor state, queued frames, the
// surface pool, and drops all held D3D11 interface references.
static void uninit(struct mp_filter *vf)
{
    struct priv *p = vf->priv;

    destroy_video_proc(vf);
    flush_frames(vf);
    talloc_free(p->queue);
    talloc_free(p->pool);

    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);

    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);

    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);

    if (p->vo_dev)
        ID3D11Device_Release(p->vo_dev);
}

static const struct mp_filter_info vf_d3d11vpp_filter = {
    .name = "d3d11vpp",
    .process = vf_d3d11vpp_process,
    .reset = flush_frames,
    .destroy = uninit,
    .priv_size = sizeof(struct priv),
};

// Creates the d3d11vpp filter. Takes ownership of 'options' (freed on
// failure). Requires a D3D11VA hwdec device from the stream info; returns
// NULL if it is unavailable or any interface query fails.
static struct mp_filter *vf_d3d11vpp_create(struct mp_filter *parent,
                                            void *options)
{
    struct mp_filter *f = mp_filter_create(parent, &vf_d3d11vpp_filter);
    if (!f) {
        talloc_free(options);
        return NULL;
    }

    mp_filter_add_pin(f, MP_PIN_IN, "in");
    mp_filter_add_pin(f, MP_PIN_OUT, "out");

    struct priv *p = f->priv;
    p->opts = talloc_steal(p, options);

    // Special path for vf_d3d11_create_outconv(): disable all processing except
    // possibly surface format conversions.
    if (!p->opts) {
        static const struct opts opts = {0};
        p->opts = (struct opts *)&opts;
    }

    p->queue = mp_refqueue_alloc(f);

    struct mp_stream_info *info = mp_filter_find_stream_info(f);
    if (!info || !info->hwdec_devs)
        goto fail;

    hwdec_devices_request_all(info->hwdec_devs);

    struct mp_hwdec_ctx *hwctx =
        hwdec_devices_get_by_lavc(info->hwdec_devs, AV_HWDEVICE_TYPE_D3D11VA);
    if (!hwctx || !hwctx->av_device_ref)
        goto fail;
    AVHWDeviceContext *avhwctx = (void *)hwctx->av_device_ref->data;
    AVD3D11VADeviceContext *d3dctx = avhwctx->hwctx;

    // Borrow the VO's device; add a ref since uninit() releases it.
    p->vo_dev = d3dctx->device;
    ID3D11Device_AddRef(p->vo_dev);

    p->vo_formats = hwctx->supported_formats;

    HRESULT hr;

    hr = ID3D11Device_QueryInterface(p->vo_dev, &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr))
        goto fail;

    ID3D11Device_GetImmediateContext(p->vo_dev, &p->device_ctx);
    if (!p->device_ctx)
        goto fail;

    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx,
                                            &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr))
        goto fail;

    p->pool = mp_image_pool_new(f);
    mp_image_pool_set_allocator(p->pool, alloc_pool, f);
    mp_image_pool_set_lru(p->pool);

    mp_refqueue_add_in_format(p->queue, IMGFMT_D3D11, 0);

    mp_refqueue_set_refs(p->queue, 0, 0);
    mp_refqueue_set_mode(p->queue,
        (p->opts->deint_enabled ? MP_MODE_DEINT : 0) |
        MP_MODE_OUTPUT_FIELDS |
        (p->opts->interlaced_only ? MP_MODE_INTERLACED_ONLY : 0));

    return f;

fail:
    talloc_free(f);
    return NULL;
}

#define OPT_BASE_STRUCT struct opts
static const m_option_t vf_opts_fields[] = {
    OPT_FLAG("deint", deint_enabled, 0),
    OPT_FLAG("interlaced-only", interlaced_only, 0),
    OPT_CHOICE("mode", mode, 0,
               ({"blend", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND},
                {"bob", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB},
                {"adaptive", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE},
                {"mocomp", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION},
                {"ivctc", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_INVERSE_TELECINE},
/* Creates a D3D11 device and, optionally, a swapchain on top of it.
 *
 * Behaves like D3D11CreateDevice(); when the caller passes a non-NULL
 * 'swapchain' pointer, a swapchain described by 'swapchain_desc' is
 * additionally created on the new device. On failure all out parameters
 * are reset (the swapchain/device pointers to NULL, the feature level to 0,
 * and the immediate context is released and NULLed).
 */
HRESULT WINAPI D3D11CreateDeviceAndSwapChain(IDXGIAdapter *adapter, D3D_DRIVER_TYPE driver_type,
        HMODULE swrast, UINT flags, const D3D_FEATURE_LEVEL *feature_levels, UINT levels,
        UINT sdk_version, const DXGI_SWAP_CHAIN_DESC *swapchain_desc, IDXGISwapChain **swapchain,
        ID3D11Device **device_out, D3D_FEATURE_LEVEL *obtained_feature_level,
        ID3D11DeviceContext **immediate_context)
{
    DXGI_SWAP_CHAIN_DESC desc;
    IDXGIDevice *dxgi_device;
    IDXGIFactory *factory;
    ID3D11Device *device;
    HRESULT hr;

    TRACE("adapter %p, driver_type %s, swrast %p, flags %#x, feature_levels %p, levels %u, sdk_version %u, "
            "swapchain_desc %p, swapchain %p, device %p, obtained_feature_level %p, immediate_context %p.\n",
            adapter, debug_d3d_driver_type(driver_type), swrast, flags, feature_levels, levels,
            sdk_version, swapchain_desc, swapchain, device_out, obtained_feature_level, immediate_context);

    if (swapchain)
        *swapchain = NULL;
    if (device_out)
        *device_out = NULL;

    /* A swapchain was requested but no description was given; bail out
     * before creating a device instead of dereferencing NULL below. */
    if (swapchain && !swapchain_desc)
        return E_INVALIDARG;

    if (FAILED(hr = D3D11CreateDevice(adapter, driver_type, swrast, flags, feature_levels, levels,
            sdk_version, &device, obtained_feature_level, immediate_context)))
    {
        WARN("Failed to create a device, returning %#x.\n", hr);
        return hr;
    }

    if (swapchain)
    {
        if (FAILED(hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void **)&dxgi_device)))
        {
            ERR("Failed to get a dxgi device from the d3d11 device, returning %#x.\n", hr);
            goto cleanup;
        }

        hr = IDXGIDevice_GetAdapter(dxgi_device, &adapter);
        IDXGIDevice_Release(dxgi_device);
        if (FAILED(hr))
        {
            ERR("Failed to get the device adapter, returning %#x.\n", hr);
            goto cleanup;
        }

        hr = IDXGIAdapter_GetParent(adapter, &IID_IDXGIFactory, (void **)&factory);
        IDXGIAdapter_Release(adapter);
        if (FAILED(hr))
        {
            ERR("Failed to get the adapter factory, returning %#x.\n", hr);
            goto cleanup;
        }

        /* Copy the description into a local: CreateSwapChain may modify it. */
        desc = *swapchain_desc;
        hr = IDXGIFactory_CreateSwapChain(factory, (IUnknown *)device, &desc, swapchain);
        IDXGIFactory_Release(factory);
        if (FAILED(hr))
        {
            WARN("Failed to create a swapchain, returning %#x.\n", hr);
            goto cleanup;
        }

        /* Log the created interface, not the caller's pointer-to-pointer. */
        TRACE("Created IDXGISwapChain %p.\n", *swapchain);
    }

    if (device_out)
        *device_out = device;
    else
        ID3D11Device_Release(device);

    return S_OK;

cleanup:
    /* 'device' is always valid here: cleanup is reached only after a
     * successful D3D11CreateDevice() call. */
    ID3D11Device_Release(device);
    if (obtained_feature_level)
        *obtained_feature_level = 0;
    if (immediate_context)
    {
        /* FIXME: Remove the following NULL check once the
         * d3d11_device_GetImmediateContext() is implemented. */
        if (*immediate_context)
            ID3D11DeviceContext_Release(*immediate_context);
        *immediate_context = NULL;
    }

    return hr;
}