Example #1
static int query_format(struct vf_instance_s* vf, unsigned int fmt){
	switch(fmt)
	{
	case IMGFMT_YV12:
	case IMGFMT_I420:
	case IMGFMT_IYUV:
	case IMGFMT_YVU9:
	case IMGFMT_444P:
	case IMGFMT_422P:
	case IMGFMT_411P:
		return vf_next_query_format(vf, fmt);
	}
	return 0;
}
Example #2
/***
 * \param vf pointer to vf_instance
 * \param fmt image format to query for
 *
 * \returns 0 if image format in fmt is not supported
 *
 * Given the image format specified by \a fmt, this routine is called
 * to ask if the format is supported or not.
 */
static int query_format(struct vf_instance *vf, unsigned int fmt){
	VERBOSE("query_format() called\n");

	switch (fmt) {
		case IMGFMT_YV12:
		case IMGFMT_YUY2:
			/* strictly speaking the output format of
			 * this filter will be known after config(),
			 * but everything that supports IMGFMT_ZRMJPEGNI
			 * should also support all other IMGFMT_ZRMJPEG* */
			return vf_next_query_format(vf, IMGFMT_ZRMJPEGNI);
	}

	return 0;
}
Example #3
static int config(struct vf_instance *vf,
        int width, int height, int d_width, int d_height,
        unsigned int flags, unsigned int outfmt){

    if(vf_next_query_format(vf,IMGFMT_YV12)<=0){
        mp_msg(MSGT_VFILTER, MSGL_WARN, MSGTR_MPCODECS_WarnNextFilterDoesntSupport, "YV12");
        return 0;
    }

    // hmm, what's the meaning of these ... ;)
    d_width= width= WIDTH;
    d_height= height= HEIGHT;

    return vf_next_config(vf,width,height,d_width,d_height,flags,IMGFMT_YV12);
}
Example #4
static int config(struct vf_instance* vf,
        int width, int height, int d_width, int d_height,
	unsigned int flags, unsigned int outfmt){

    if(vf_next_query_format(vf,IMGFMT_YV12)<=0){
	mp_tmsg(MSGT_VFILTER, MSGL_WARN, "%s not supported by next filter/vo :(\n", "YV12");
	return 0;
    }

    // hmm, what's the meaning of these ... ;)
    d_width= width= WIDTH;
    d_height= height= HEIGHT;

    return vf_next_config(vf,width,height,d_width,d_height,flags,IMGFMT_YV12);
}
Example #5
static int find_best_out(vf_instance_t *vf, int in_format)
{
    int best = 0;
    for (int out_format = IMGFMT_START; out_format < IMGFMT_END; out_format++) {
        if (!vf_next_query_format(vf, out_format))
            continue;
        if (sws_isSupportedOutput(imgfmt2pixfmt(out_format)) < 1)
            continue;
        if (best) {
            int candidate = mp_imgfmt_select_best(best, out_format, in_format);
            if (candidate)
                best = candidate;
        } else {
            best = out_format;
        }
    }
    return best;
}
Example #6
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    if (!IMGFMT_IS_HWACCEL(fmt) && imgfmt2pixfmt(fmt) != AV_PIX_FMT_NONE) {
        if (sws_isSupportedInput(imgfmt2pixfmt(fmt)) < 1)
            return 0;
        unsigned int best = find_best_out(vf, fmt);
        int flags;
        if (!best)
            return 0;            // no matching out-fmt
        flags = vf_next_query_format(vf, best);
        if (!(flags & (VFCAP_CSP_SUPPORTED | VFCAP_CSP_SUPPORTED_BY_HW)))
            return 0;
        if (fmt != best)
            flags &= ~VFCAP_CSP_SUPPORTED_BY_HW;
        return flags;
    }
    return 0;   // no matching in-fmt
}
Example #7
static int query_format(struct vf_instance_s *vf, unsigned fmt) {
    switch(fmt) {
    case IMGFMT_YVU9:
    case IMGFMT_IF09:
    case IMGFMT_YV12:
    case IMGFMT_I420:
    case IMGFMT_IYUV:
    case IMGFMT_CLPL:
    case IMGFMT_Y800:
    case IMGFMT_Y8:
    case IMGFMT_NV12:
    case IMGFMT_NV21:
    case IMGFMT_444P:
    case IMGFMT_422P:
    case IMGFMT_411P:
    case IMGFMT_HM12:
        return vf_next_query_format(vf, fmt);
    }
    return 0;
}
Example #8
static int config(struct vf_instance *vf,
        int width, int height, int d_width, int d_height,
	unsigned int flags, unsigned int outfmt){
    if(vf_next_query_format(vf,IMGFMT_MPEGPES)<=0) return 0;

    lavc_venc_context.width = width;
    lavc_venc_context.height = height;

    if(!lavc_venc_context.time_base.num || !lavc_venc_context.time_base.den){
	// guess FPS:
	switch(height){
	case 240:
	case 480:
	    lavc_venc_context.time_base= (AVRational){1001,30000};
	    break;
	case 576:
	case 288:
	default:
	    lavc_venc_context.time_base= (AVRational){1,25};
	    break;
//	    lavc_venc_context.frame_rate=vo_fps*FRAME_RATE_BASE; // same as src
	}
    }

    free(vf->priv->outbuf);

    vf->priv->outbuf_size=10000+width*height;  // must be enough!
    vf->priv->outbuf = malloc(vf->priv->outbuf_size);

    if (avcodec_open(&lavc_venc_context, vf->priv->codec) != 0) {
	mp_msg(MSGT_MENCODER,MSGL_ERR,MSGTR_CantOpenCodec);
	return 0;
    }

    if (lavc_venc_context.codec->encode == NULL) {
	mp_msg(MSGT_MENCODER,MSGL_ERR,"avcodec init failed (ctx->codec->encode == NULL)!\n");
	return 0;
    }

    return vf_next_config(vf,width,height,d_width,d_height,flags,IMGFMT_MPEGPES);
}
Example #9
static unsigned int find_best_out(vf_instance_t *vf, int in_format){
    unsigned int best=0;
    int i = -1;
    int normalized_format = normalize_yuvp16(in_format);
    int j = normalized_format ? -2 : -1;
    int format = 0;

    // find the best outfmt:
    while (1) {
        int ret;
        if (j < 0) {
            format = j == -1 && normalized_format ? normalized_format : in_format;
            j++;
        } else if (i < 0) {
            while (preferred_conversions[j][0] &&
                   preferred_conversions[j][0] != in_format)
                j++;
            format = preferred_conversions[j++][1];
            // switch to standard list
            if (!format)
                i = 0;
        }
        if (i >= 0)
            format = outfmt_list[i++];
        if (!format)
            break;
        ret = vf_next_query_format(vf, format);

        mp_msg(MSGT_VFILTER,MSGL_DBG2,"scale: query(%s) -> %d\n",vo_format_name(format),ret&3);
        if(ret&VFCAP_CSP_SUPPORTED_BY_HW){
            best=format; // no conversion -> bingo!
            break;
        }
        if(ret&VFCAP_CSP_SUPPORTED && !best)
            best=format; // best with conversion
    }
    return best;
}
Example #10
static int vf_default_query_format(struct vf_instance *vf, unsigned int fmt)
{
    return vf_next_query_format(vf, fmt);
}
Example #11
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    if (mp_sws_supported_format(fmt))
        return vf_next_query_format(vf, fmt);
    return 0;
}
Example #12
static int query_format(struct vf_instance *vf, unsigned int fmt){
    return vf_next_query_format(vf,IMGFMT_YV12) & (~VFCAP_CSP_SUPPORTED_BY_HW);
}
Example #13
static int query_format(struct vf_instance_s* vf, unsigned int fmt){
    if (fmt == IMGFMT_YVU9 || fmt == IMGFMT_IF09)
	return vf_next_query_format(vf,IMGFMT_YV12) & (~VFCAP_CSP_SUPPORTED_BY_HW);
    return 0;
}
Example #14
static int query_format(struct vf_instance_s* vf, unsigned int outfmt){
    unsigned int fmt=getfmt(outfmt,vf->priv->forced);
    if(!fmt) return 0;
    return vf_next_query_format(vf,fmt) & (~VFCAP_CSP_SUPPORTED_BY_HW);
}
Example #15
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    return vf_next_query_format(vf, fmt == IMGFMT_VDPAU ? IMGFMT_NV12 : fmt);
}
Example #16
static int recreate_video_proc(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;
    HRESULT hr;

    destroy_video_proc(vf);

    D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
        .InputFrameFormat = p->d3d_frame_format,
        .InputWidth = p->c_w,
        .InputHeight = p->c_h,
        .OutputWidth = p->params.w,
        .OutputHeight = p->params.h,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(p->video_dev, &vpdesc,
                                                          &p->vp_enum);
    if (FAILED(hr))
        goto fail;

    D3D11_VIDEO_PROCESSOR_CAPS caps;
    hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorCaps(p->vp_enum, &caps);
    if (FAILED(hr))
        goto fail;

    MP_VERBOSE(vf, "Found %d rate conversion caps. Looking for caps=0x%x.\n",
               (int)caps.RateConversionCapsCount, p->mode);

    int rindex = -1;
    for (int n = 0; n < caps.RateConversionCapsCount; n++) {
        D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rcaps;
        hr = ID3D11VideoProcessorEnumerator_GetVideoProcessorRateConversionCaps
                (p->vp_enum, n, &rcaps);
        if (FAILED(hr))
            goto fail;
        MP_VERBOSE(vf, "  - %d: 0x%08x\n", n, (unsigned)rcaps.ProcessorCaps);
        if (rcaps.ProcessorCaps & p->mode) {
            MP_VERBOSE(vf, "       (matching)\n");
            if (rindex < 0)
                rindex = n;
        }
    }

    if (rindex < 0) {
        MP_WARN(vf, "No fitting video processor found, picking #0.\n");
        rindex = 0;
    }

    // TODO: so, how do we select which rate conversion mode the processor uses?

    hr = ID3D11VideoDevice_CreateVideoProcessor(p->video_dev, p->vp_enum, rindex,
                                                &p->video_proc);
    if (FAILED(hr)) {
        MP_ERR(vf, "Failed to create D3D11 video processor.\n");
        goto fail;
    }

    // Note: libavcodec does not support cropping left/top with hwaccel.
    RECT src_rc = {
        .right = p->params.w,
        .bottom = p->params.h,
    };
    ID3D11VideoContext_VideoProcessorSetStreamSourceRect(p->video_ctx,
                                                         p->video_proc,
                                                         0, TRUE, &src_rc);

    // This is supposed to stop drivers from f*****g up the video quality.
    ID3D11VideoContext_VideoProcessorSetStreamAutoProcessingMode(p->video_ctx,
                                                                 p->video_proc,
                                                                 0, FALSE);

    ID3D11VideoContext_VideoProcessorSetStreamOutputRate(p->video_ctx,
                                                         p->video_proc,
                                                         0,
                                                         D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL,
                                                         FALSE, 0);

    D3D11_VIDEO_PROCESSOR_COLOR_SPACE csp = {
        .YCbCr_Matrix = p->params.color.space != MP_CSP_BT_601,
        .Nominal_Range = p->params.color.levels == MP_CSP_LEVELS_TV ? 1 : 2,
    };
    ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
                                                         p->video_proc,
                                                         0, &csp);
    if (p->out_rgb) {
        if (p->params.color.space != MP_CSP_BT_601 &&
            p->params.color.space != MP_CSP_BT_709)
        {
            MP_WARN(vf, "Unsupported video colorspace (%s/%s). Consider "
                    "disabling hardware decoding, or using "
                    "--hwdec=d3d11va-copy to get correct output.\n",
                    m_opt_choice_str(mp_csp_names, p->params.color.space),
                    m_opt_choice_str(mp_csp_levels_names, p->params.color.levels));
        }
    } else {
        ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
                                                             p->video_proc,
                                                             &csp);
    }

    return 0;
fail:
    destroy_video_proc(vf);
    return -1;
}

static int render(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;
    int res = -1;
    HRESULT hr;
    ID3D11VideoProcessorInputView *in_view = NULL;
    ID3D11VideoProcessorOutputView *out_view = NULL;
    struct mp_image *in = NULL, *out = NULL;
    out = mp_image_pool_get(p->pool, p->out_params.imgfmt, p->params.w, p->params.h);
    if (!out)
        goto cleanup;

    ID3D11Texture2D *d3d_out_tex = (void *)out->planes[1];

    in = mp_refqueue_get(p->queue, 0);
    if (!in)
        goto cleanup;
    ID3D11Texture2D *d3d_tex = (void *)in->planes[1];
    int d3d_subindex = (intptr_t)in->planes[2];

    mp_image_copy_attributes(out, in);

    D3D11_VIDEO_FRAME_FORMAT d3d_frame_format;
    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_top_field_first(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);
    if (!p->video_proc || p->c_w != texdesc.Width || p->c_h != texdesc.Height ||
        p->d3d_frame_format != d3d_frame_format)
    {
        p->c_w = texdesc.Width;
        p->c_h = texdesc.Height;
        p->d3d_frame_format = d3d_frame_format;
        if (recreate_video_proc(vf) < 0)
            goto cleanup;
    }

    if (!mp_refqueue_should_deint(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (mp_refqueue_is_top_field(p->queue)) {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
        d3d_frame_format = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }

    ID3D11VideoContext_VideoProcessorSetStreamFrameFormat(p->video_ctx,
                                                          p->video_proc,
                                                          0, d3d_frame_format);

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC indesc = {
        .ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
        .Texture2D = {
            .ArraySlice = d3d_subindex,
        },
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorInputView(p->video_dev,
                                                         (ID3D11Resource *)d3d_tex,
                                                         p->vp_enum, &indesc,
                                                         &in_view);
    if (FAILED(hr)) {
        MP_ERR(vf, "Could not create ID3D11VideoProcessorInputView\n");
        goto cleanup;
    }

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outdesc = {
        .ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
    };
    hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(p->video_dev,
                                                          (ID3D11Resource *)d3d_out_tex,
                                                          p->vp_enum, &outdesc,
                                                          &out_view);
    if (FAILED(hr))
        goto cleanup;

    D3D11_VIDEO_PROCESSOR_STREAM stream = {
        .Enable = TRUE,
        .pInputSurface = in_view,
    };
    int frame = mp_refqueue_is_second_field(p->queue);
    hr = ID3D11VideoContext_VideoProcessorBlt(p->video_ctx, p->video_proc,
                                              out_view, frame, 1, &stream);
    if (FAILED(hr)) {
        MP_ERR(vf, "VideoProcessorBlt failed.\n");
        goto cleanup;
    }

    res = 0;
cleanup:
    if (in_view)
        ID3D11VideoProcessorInputView_Release(in_view);
    if (out_view)
        ID3D11VideoProcessorOutputView_Release(out_view);
    if (res >= 0) {
        vf_add_output_frame(vf, out);
    } else {
        talloc_free(out);
    }
    mp_refqueue_next_field(p->queue);
    return res;
}

static int filter_out(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;

    if (!mp_refqueue_has_output(p->queue))
        return 0;

    // no filtering
    if (!mp_refqueue_should_deint(p->queue) && !p->require_filtering) {
        struct mp_image *in = mp_image_new_ref(mp_refqueue_get(p->queue, 0));
        if (!in)
            return -1;
        mp_image_set_params(in, &p->out_params);
        vf_add_output_frame(vf, in);
        mp_refqueue_next(p->queue);
        return 0;
    }

    return render(vf);
}

static int reconfig(struct vf_instance *vf, struct mp_image_params *in,
                    struct mp_image_params *out)
{
    struct vf_priv_s *p = vf->priv;

    flush_frames(vf);
    talloc_free(p->pool);
    p->pool = NULL;

    destroy_video_proc(vf);

    *out = *in;

    if (vf_next_query_format(vf, IMGFMT_D3D11VA) ||
        vf_next_query_format(vf, IMGFMT_D3D11NV12))
    {
        out->imgfmt = vf_next_query_format(vf, IMGFMT_D3D11VA)
                    ? IMGFMT_D3D11VA : IMGFMT_D3D11NV12;
        out->hw_subfmt = IMGFMT_NV12;
        p->out_format = DXGI_FORMAT_NV12;
        p->out_shared = false;
        p->out_rgb = false;
    } else {
        out->imgfmt = IMGFMT_D3D11RGB;
        out->hw_subfmt = IMGFMT_RGB0;
        p->out_format = DXGI_FORMAT_B8G8R8A8_UNORM;
        p->out_shared = true;
        p->out_rgb = true;
    }

    p->require_filtering = in->hw_subfmt != out->hw_subfmt;

    p->params = *in;
    p->out_params = *out;

    p->pool = mp_image_pool_new(20);
    mp_image_pool_set_allocator(p->pool, alloc_pool, vf);
    mp_image_pool_set_lru(p->pool);

    return 0;
}

static void uninit(struct vf_instance *vf)
{
    struct vf_priv_s *p = vf->priv;

    destroy_video_proc(vf);

    flush_frames(vf);
    mp_refqueue_free(p->queue);
    talloc_free(p->pool);

    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);

    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);

    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);

    if (p->vo_dev)
        ID3D11Device_Release(p->vo_dev);
}

static int query_format(struct vf_instance *vf, unsigned int imgfmt)
{
    if (imgfmt == IMGFMT_D3D11VA ||
        imgfmt == IMGFMT_D3D11NV12 ||
        imgfmt == IMGFMT_D3D11RGB)
    {
        return vf_next_query_format(vf, IMGFMT_D3D11VA) ||
               vf_next_query_format(vf, IMGFMT_D3D11NV12) ||
               vf_next_query_format(vf, IMGFMT_D3D11RGB);
    }
    return 0;
}

static bool test_conversion(int in, int out)
{
    return (in == IMGFMT_D3D11VA ||
            in == IMGFMT_D3D11NV12 ||
            in == IMGFMT_D3D11RGB) &&
           (out == IMGFMT_D3D11VA ||
            out == IMGFMT_D3D11NV12 ||
            out == IMGFMT_D3D11RGB);
}

static int control(struct vf_instance *vf, int request, void* data)
{
    struct vf_priv_s *p = vf->priv;
    switch (request){
    case VFCTRL_GET_DEINTERLACE:
        *(int*)data = !!p->deint_enabled;
        return true;
    case VFCTRL_SET_DEINTERLACE:
        p->deint_enabled = !!*(int*)data;
        return true;
    case VFCTRL_SEEK_RESET:
        flush_frames(vf);
        return true;
    default:
        return CONTROL_UNKNOWN;
    }
}

static int vf_open(vf_instance_t *vf)
{
    struct vf_priv_s *p = vf->priv;

    vf->reconfig = reconfig;
    vf->filter_ext = filter_ext;
    vf->filter_out = filter_out;
    vf->query_format = query_format;
    vf->uninit = uninit;
    vf->control = control;

    p->queue = mp_refqueue_alloc();

    p->vo_dev = hwdec_devices_load(vf->hwdec_devs, HWDEC_D3D11VA);
    if (!p->vo_dev)
        return 0;

    ID3D11Device_AddRef(p->vo_dev);

    HRESULT hr;

    hr = ID3D11Device_QueryInterface(p->vo_dev, &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr))
        goto fail;

    ID3D11Device_GetImmediateContext(p->vo_dev, &p->device_ctx);
    if (!p->device_ctx)
        goto fail;
    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx, &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr))
        goto fail;

    return 1;

fail:
    uninit(vf);
    return 0;
}

#define OPT_BASE_STRUCT struct vf_priv_s
static const m_option_t vf_opts_fields[] = {
    OPT_FLAG("deint", deint_enabled, 0),
    OPT_FLAG("interlaced-only", interlaced_only, 0),
    OPT_CHOICE("mode", mode, 0,
        ({"blend", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND},
         {"bob", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB},
         {"adaptive", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE},
         {"mocomp", D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION},
Example #17
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    if (!IMGFMT_IS_HWACCEL(fmt))
        return vf_next_query_format(vf, fmt);
    return 0;
}
Example #18
static int query_format(struct vf_instance_s* vf, unsigned int fmt){
  return (vf_next_query_format(vf,fmt));
}
Example #19
static int query_format(struct vf_instance_s* vf, unsigned int fmt){
    if(fmt==vf->priv->fmt)
	return vf_next_query_format(vf,fmt);
    return 0;
}
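
Taken together, these examples share a single contract: query_format() returns 0 when the filter cannot accept the input format, and otherwise forwards the VFCAP_* capability flags reported by the next filter in the chain, masking out VFCAP_CSP_SUPPORTED_BY_HW when the filter converts frames in software (Examples #12 through #14). The sketch below restates that pattern in minimal form; the filter and its single accepted format are hypothetical and chosen only for illustration, and headers are omitted as in the examples above.

static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    /* Accept only the one format this hypothetical filter handles. */
    if (fmt != IMGFMT_YV12)
        return 0;                      /* input format not supported */
    /* Forward the query downstream, but clear the hardware-support bit
     * because frames pass through this filter in software first. */
    return vf_next_query_format(vf, fmt) & ~VFCAP_CSP_SUPPORTED_BY_HW;
}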