Example #1
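LAV Filters' D3D11 decoder allocating a frames context on an existing D3D11VA device reference: the software format is P010 for 10-bit input and NV12 otherwise, and the surfaces get decoder/shader-resource bind flags plus shared-resource access.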
STDMETHODIMP CDecD3D11::AllocateFramesContext(int width, int height, AVPixelFormat format, int nSurfaces, AVBufferRef **ppFramesCtx)
{
  ASSERT(m_pAVCtx);
  ASSERT(m_pDevCtx);
  ASSERT(ppFramesCtx);

  // unref any old buffer
  av_buffer_unref(ppFramesCtx);
  SafeRelease(&m_pD3D11StagingTexture);

  // allocate a new frames context for the device context
  *ppFramesCtx = av_hwframe_ctx_alloc(m_pDevCtx);
  if (*ppFramesCtx == nullptr)
    return E_OUTOFMEMORY;

  AVHWFramesContext *pFrames = (AVHWFramesContext *)(*ppFramesCtx)->data;
  pFrames->format = AV_PIX_FMT_D3D11;
  pFrames->sw_format = (format == AV_PIX_FMT_YUV420P10) ? AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
  pFrames->width = width;
  pFrames->height = height;
  pFrames->initial_pool_size = nSurfaces;

  AVD3D11VAFramesContext *pFramesHWContext = (AVD3D11VAFramesContext *)pFrames->hwctx;
  pFramesHWContext->BindFlags |= D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
  pFramesHWContext->MiscFlags |= D3D11_RESOURCE_MISC_SHARED;

  int ret = av_hwframe_ctx_init(*ppFramesCtx);
  if (ret < 0)
  {
    av_buffer_unref(ppFramesCtx);
    return E_FAIL;
  }

  return S_OK;
}
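Example #1 takes the device reference (m_pDevCtx) as a given. A minimal sketch, assuming the default adapter is acceptable, of how such a reference is usually created (the helper name is illustrative, not part of the original source):

// Hypothetical setup for the device reference used above; error
// handling is abbreviated.
static HRESULT CreateD3D11DeviceContext(AVBufferRef **ppDevCtx)
{
    int ret = av_hwdevice_ctx_create(ppDevCtx, AV_HWDEVICE_TYPE_D3D11VA,
                                     nullptr /* default adapter */, nullptr, 0);
    return (ret < 0) ? E_FAIL : S_OK;
}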
Example #2
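FFmpeg's hwupload_cuda output configuration: a CUDA frames context is allocated on the upload device, mirrors the input link's software format and dimensions, and a reference to it is installed on the output link.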
static int cudaupload_config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    CudaUploadContext *s = ctx->priv;

    AVHWFramesContext *hwframe_ctx;
    int ret;

    av_buffer_unref(&s->hwframe);
    s->hwframe = av_hwframe_ctx_alloc(s->hwdevice);
    if (!s->hwframe)
        return AVERROR(ENOMEM);

    hwframe_ctx            = (AVHWFramesContext*)s->hwframe->data;
    hwframe_ctx->format    = AV_PIX_FMT_CUDA;
    hwframe_ctx->sw_format = inlink->format;
    hwframe_ctx->width     = inlink->w;
    hwframe_ctx->height    = inlink->h;

    ret = av_hwframe_ctx_init(s->hwframe);
    if (ret < 0)
        return ret;

    outlink->hw_frames_ctx = av_buffer_ref(s->hwframe);
    if (!outlink->hw_frames_ctx)
        return AVERROR(ENOMEM);

    return 0;
}
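Once a frames context like the one above has been initialised, uploading a software frame is a two-step pattern: take a surface from the pool, then copy into it. A minimal sketch of that pattern, assuming hwframe_ref is the initialised context reference (upload_one is an illustrative helper, not part of the original filter):

static int upload_one(AVBufferRef *hwframe_ref, const AVFrame *sw, AVFrame **out)
{
    AVFrame *hw = av_frame_alloc();
    int ret;

    if (!hw)
        return AVERROR(ENOMEM);

    ret = av_hwframe_get_buffer(hwframe_ref, hw, 0); /* surface from the pool */
    if (ret < 0)
        goto fail;

    ret = av_hwframe_transfer_data(hw, sw, 0);       /* copy to device memory */
    if (ret < 0)
        goto fail;

    ret = av_frame_copy_props(hw, sw);               /* keep pts and metadata */
    if (ret < 0)
        goto fail;

    *out = hw;
    return 0;

fail:
    av_frame_free(&hw);
    return ret;
}

Example #3

The QSV scaler's output frame pool: dimensions are aligned to 32, the input's surface frame_type is propagated, and after initialisation each surface's CropW/CropH is set back to the unaligned output size.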
static int init_out_pool(AVFilterContext *ctx,
                         int out_width, int out_height)
{
    QSVScaleContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];

    AVHWFramesContext *in_frames_ctx;
    AVHWFramesContext *out_frames_ctx;
    AVQSVFramesContext *in_frames_hwctx;
    AVQSVFramesContext *out_frames_hwctx;
    enum AVPixelFormat in_format;
    enum AVPixelFormat out_format;
    int i, ret;

    /* check that we have a hw context */
    if (!ctx->inputs[0]->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
        return AVERROR(EINVAL);
    }
    in_frames_ctx   = (AVHWFramesContext*)ctx->inputs[0]->hw_frames_ctx->data;
    in_frames_hwctx = in_frames_ctx->hwctx;

    in_format     = in_frames_ctx->sw_format;
    out_format    = (s->format == AV_PIX_FMT_NONE) ? in_format : s->format;

    outlink->hw_frames_ctx = av_hwframe_ctx_alloc(in_frames_ctx->device_ref);
    if (!outlink->hw_frames_ctx)
        return AVERROR(ENOMEM);
    out_frames_ctx   = (AVHWFramesContext*)outlink->hw_frames_ctx->data;
    out_frames_hwctx = out_frames_ctx->hwctx;

    out_frames_ctx->format            = AV_PIX_FMT_QSV;
    out_frames_ctx->width             = FFALIGN(out_width,  32);
    out_frames_ctx->height            = FFALIGN(out_height, 32);
    out_frames_ctx->sw_format         = out_format;
    out_frames_ctx->initial_pool_size = 4;

    out_frames_hwctx->frame_type = in_frames_hwctx->frame_type;

    ret = ff_filter_init_hw_frames(ctx, outlink, 32);
    if (ret < 0)
        return ret;

    ret = av_hwframe_ctx_init(outlink->hw_frames_ctx);
    if (ret < 0)
        return ret;

    for (i = 0; i < out_frames_hwctx->nb_surfaces; i++) {
        mfxFrameInfo *info = &out_frames_hwctx->surfaces[i].Info;
        info->CropW = out_width;
        info->CropH = out_height;
    }

    return 0;
}
Example #4
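CUDA scaler stage initialisation: per-plane sizes are derived from the chroma subsampling of the input and output formats, a 32-aligned CUDA frames context is created, and one output frame is pre-allocated from the pool.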
static av_cold int init_stage(CUDAScaleContext *s, AVBufferRef *device_ctx)
{
    AVBufferRef *out_ref = NULL;
    AVHWFramesContext *out_ctx;
    int in_sw, in_sh, out_sw, out_sh;
    int ret, i;

    av_pix_fmt_get_chroma_sub_sample(s->in_fmt,  &in_sw,  &in_sh);
    av_pix_fmt_get_chroma_sub_sample(s->out_fmt, &out_sw, &out_sh);
    if (!s->planes_out[0].width) {
        s->planes_out[0].width  = s->planes_in[0].width;
        s->planes_out[0].height = s->planes_in[0].height;
    }

    for (i = 1; i < FF_ARRAY_ELEMS(s->planes_in); i++) {
        s->planes_in[i].width   = s->planes_in[0].width   >> in_sw;
        s->planes_in[i].height  = s->planes_in[0].height  >> in_sh;
        s->planes_out[i].width  = s->planes_out[0].width  >> out_sw;
        s->planes_out[i].height = s->planes_out[0].height >> out_sh;
    }

    out_ref = av_hwframe_ctx_alloc(device_ctx);
    if (!out_ref)
        return AVERROR(ENOMEM);
    out_ctx = (AVHWFramesContext*)out_ref->data;

    out_ctx->format    = AV_PIX_FMT_CUDA;
    out_ctx->sw_format = s->out_fmt;
    out_ctx->width     = FFALIGN(s->planes_out[0].width,  32);
    out_ctx->height    = FFALIGN(s->planes_out[0].height, 32);

    ret = av_hwframe_ctx_init(out_ref);
    if (ret < 0)
        goto fail;

    av_frame_unref(s->frame);
    ret = av_hwframe_get_buffer(out_ref, s->frame, 0);
    if (ret < 0)
        goto fail;

    s->frame->width  = s->planes_out[0].width;
    s->frame->height = s->planes_out[0].height;

    av_buffer_unref(&s->frames_ctx);
    s->frames_ctx = out_ref;

    return 0;
fail:
    av_buffer_unref(&out_ref);
    return ret;
}
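Example #5

The generic OpenCL filter output helper: it resolves an OpenCL device (from the filter itself or from the graph's hw_device_ctx), then allocates and initialises an output frames context with the filter's chosen format and size.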
int ff_opencl_filter_config_output(AVFilterLink *outlink)
{
    AVFilterContext   *avctx = outlink->src;
    OpenCLFilterContext *ctx = avctx->priv;
    AVBufferRef       *output_frames_ref = NULL;
    AVHWFramesContext *output_frames;
    int err;

    av_buffer_unref(&outlink->hw_frames_ctx);

    if (!ctx->device_ref) {
        if (!avctx->hw_device_ctx) {
            av_log(avctx, AV_LOG_ERROR, "OpenCL filtering requires an "
                   "OpenCL device.\n");
            return AVERROR(EINVAL);
        }

        err = opencl_filter_set_device(avctx, avctx->hw_device_ctx);
        if (err < 0)
            return err;
    }

    output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!output_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    output_frames = (AVHWFramesContext*)output_frames_ref->data;

    output_frames->format    = AV_PIX_FMT_OPENCL;
    output_frames->sw_format = ctx->output_format;
    output_frames->width     = ctx->output_width;
    output_frames->height    = ctx->output_height;

    err = av_hwframe_ctx_init(output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise output "
               "frames: %d.\n", err);
        goto fail;
    }

    outlink->hw_frames_ctx = output_frames_ref;
    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    return 0;
fail:
    av_buffer_unref(&output_frames_ref);
    return err;
}
Example #6
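QSV decoder setup from the ffmpeg command-line tool: a pool of 64 decoder-target video-memory surfaces is created with coded dimensions aligned to 32.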
int qsv_init(AVCodecContext *s)
{
    InputStream *ist = s->opaque;
    AVHWFramesContext *frames_ctx;
    AVQSVFramesContext *frames_hwctx;
    int ret;

    if (!hw_device_ctx) {
        ret = qsv_device_init(ist);
        if (ret < 0)
            return ret;
    }

    av_buffer_unref(&ist->hw_frames_ctx);
    ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx);
    if (!ist->hw_frames_ctx)
        return AVERROR(ENOMEM);

    frames_ctx   = (AVHWFramesContext*)ist->hw_frames_ctx->data;
    frames_hwctx = frames_ctx->hwctx;

    frames_ctx->width             = FFALIGN(s->coded_width,  32);
    frames_ctx->height            = FFALIGN(s->coded_height, 32);
    frames_ctx->format            = AV_PIX_FMT_QSV;
    frames_ctx->sw_format         = s->sw_pix_fmt;
    frames_ctx->initial_pool_size = 64;
    frames_hwctx->frame_type      = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

    ret = av_hwframe_ctx_init(ist->hw_frames_ctx);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Error initializing a QSV frame pool\n");
        return ret;
    }

    ist->hwaccel_get_buffer = qsv_get_buffer;
    ist->hwaccel_uninit     = qsv_uninit;

    return 0;
}
Example #7
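libavcodec's public helper for deducing hwframe parameters: it allocates a bare frames context on the given device and lets the matching hwaccel's frame_params callback fill it in; the context is returned uninitialised so the caller can adjust it (for example the pool size) before calling av_hwframe_ctx_init().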
int avcodec_get_hw_frames_parameters(AVCodecContext *avctx,
                                     AVBufferRef *device_ref,
                                     enum AVPixelFormat hw_pix_fmt,
                                     AVBufferRef **out_frames_ref)
{
    AVBufferRef *frames_ref = NULL;
    AVHWAccel *hwa = find_hwaccel(avctx->codec_id, hw_pix_fmt);
    int ret;

    if (!hwa || !hwa->frame_params)
        return AVERROR(ENOENT);

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref)
        return AVERROR(ENOMEM);

    ret = hwa->frame_params(avctx, frames_ref);
    if (ret >= 0) {
        *out_frames_ref = frames_ref;
    } else {
        av_buffer_unref(&frames_ref);
    }
    return ret;
}
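A sketch of how a caller might use this helper from its get_format callback, assuming a device has already been attached to avctx->hw_device_ctx; the callback name, the VAAPI target and the pool headroom are illustrative choices:

static enum AVPixelFormat get_format_cb(AVCodecContext *avctx,
                                        const enum AVPixelFormat *fmts)
{
    for (; *fmts != AV_PIX_FMT_NONE; fmts++) {
        AVBufferRef *frames_ref = NULL;

        if (*fmts != AV_PIX_FMT_VAAPI)
            continue;

        if (avcodec_get_hw_frames_parameters(avctx, avctx->hw_device_ctx,
                                             *fmts, &frames_ref) < 0)
            break;

        /* the codec filled in format, sw_format, dimensions and a minimum
         * initial_pool_size; add headroom for frames we keep queued */
        ((AVHWFramesContext*)frames_ref->data)->initial_pool_size += 4;

        if (av_hwframe_ctx_init(frames_ref) < 0) {
            av_buffer_unref(&frames_ref);
            break;
        }

        avctx->hw_frames_ctx = frames_ref;
        return *fmts;
    }
    return AV_PIX_FMT_NONE;
}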
Example #8
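QSV transcoding setup from the ffmpeg command-line tool: after verifying that decoder and encoder both support AV_PIX_FMT_QSV and that the stream passes through unfiltered, a one-frame dummy frames context is created so the encoder can initialise against the decoder's device.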
int qsv_transcode_init(OutputStream *ost)
{
    InputStream *ist;
    const enum AVPixelFormat *pix_fmt;

    int err, i;
    AVBufferRef *encode_frames_ref = NULL;
    AVHWFramesContext *encode_frames;
    AVQSVFramesContext *qsv_frames;

    /* check if the encoder supports QSV */
    if (!ost->enc->pix_fmts)
        return 0;
    for (pix_fmt = ost->enc->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++)
        if (*pix_fmt == AV_PIX_FMT_QSV)
            break;
    if (*pix_fmt == AV_PIX_FMT_NONE)
        return 0;

    if (strcmp(ost->avfilter, "null") || ost->source_index < 0)
        return 0;

    /* check if the decoder supports QSV and the output only goes to this stream */
    ist = input_streams[ost->source_index];
    if (ist->hwaccel_id != HWACCEL_QSV || !ist->dec || !ist->dec->pix_fmts)
        return 0;
    for (pix_fmt = ist->dec->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++)
        if (*pix_fmt == AV_PIX_FMT_QSV)
            break;
    if (*pix_fmt == AV_PIX_FMT_NONE)
        return 0;

    for (i = 0; i < nb_output_streams; i++)
        if (output_streams[i] != ost &&
            output_streams[i]->source_index == ost->source_index)
            return 0;

    av_log(NULL, AV_LOG_VERBOSE, "Setting up QSV transcoding\n");

    if (!hw_device_ctx) {
        err = qsv_device_init(ist);
        if (err < 0)
            goto fail;
    }

    // This creates a dummy hw_frames_ctx for the encoder to be
    // suitably initialised.  It only contains one real frame, so
    // hopefully doesn't waste too much memory.

    encode_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx);
    if (!encode_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    encode_frames = (AVHWFramesContext*)encode_frames_ref->data;
    qsv_frames = encode_frames->hwctx;

    encode_frames->width     = FFALIGN(ist->resample_width,  32);
    encode_frames->height    = FFALIGN(ist->resample_height, 32);
    encode_frames->format    = AV_PIX_FMT_QSV;
    encode_frames->sw_format = AV_PIX_FMT_NV12;
    encode_frames->initial_pool_size = 1;

    qsv_frames->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

    err = av_hwframe_ctx_init(encode_frames_ref);
    if (err < 0)
        goto fail;

    ist->dec_ctx->pix_fmt       = AV_PIX_FMT_QSV;
    ist->resample_pix_fmt       = AV_PIX_FMT_QSV;

    ost->enc_ctx->pix_fmt       = AV_PIX_FMT_QSV;
    ost->enc_ctx->hw_frames_ctx = encode_frames_ref;

    return 0;

fail:
    av_buffer_unref(&encode_frames_ref);
    return err;
}
Example #9
File: qsv.c Project: libav/libav
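The mfxFrameAllocator callback in libavutil's QSV hwcontext: external requests are served from the caller-supplied frames context after validating the surface parameters, while internal requests allocate and initialise a fresh frames context sized by NumFrameSuggested.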
static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                                 mfxFrameAllocResponse *resp)
{
    QSVFramesContext *ctx = pthis;
    int ret;

    /* this should only be called from an encoder or decoder and
     * only allocates video memory frames */
    if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))         ||
        !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
        /* external frames -- fill from the caller-supplied frames context */
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        mfxFrameInfo      *i  = &req->Info;
        mfxFrameInfo      *i1 = &frames_hwctx->surfaces[0].Info;

        if (i->Width  > i1->Width  || i->Height > i1->Height ||
            i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
            av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
                   "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
                   i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
                   i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
            return MFX_ERR_UNSUPPORTED;
        }

        ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an external frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
        /* internal frames -- allocate a new hw frames context */
        AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        mfxFrameInfo      *i  = &req->Info;

        AVBufferRef *frames_ref, *mids_buf;
        AVHWFramesContext *frames_ctx;
        AVQSVFramesContext *frames_hwctx;

        frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
        if (!frames_ref)
            return MFX_ERR_MEMORY_ALLOC;

        frames_ctx   = (AVHWFramesContext*)frames_ref->data;
        frames_hwctx = frames_ctx->hwctx;

        frames_ctx->format            = AV_PIX_FMT_QSV;
        frames_ctx->sw_format         = qsv_map_fourcc(i->FourCC);
        frames_ctx->width             = i->Width;
        frames_ctx->height            = i->Height;
        frames_ctx->initial_pool_size = req->NumFrameSuggested;

        frames_hwctx->frame_type      = req->Type;

        ret = av_hwframe_ctx_init(frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error initializing a frames context for an internal frame "
                   "allocation request\n");
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        mids_buf = qsv_create_mids(frames_ref);
        if (!mids_buf) {
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        ret = qsv_setup_mids(resp, frames_ref, mids_buf);
        av_buffer_unref(&mids_buf);
        av_buffer_unref(&frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an internal frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else {
        return MFX_ERR_UNSUPPORTED;
    }

    return MFX_ERR_NONE;
}
Example #10
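The VAAPI scaler's output configuration: the requested output format and size are validated against the device's hwframe constraints before the output frames context and the VAAPI processing pipeline context are created.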
static int scale_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    ScaleVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    scale_vaapi_pipeline_uninit(ctx);

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(ctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->output_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
Example #11
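Common VAAPI encoder initialisation: the input frames context supplies the device, a reconstructed-frame format is chosen from the constraint list (preferring the input's software format), and a pool of reconstructed surfaces backs the encode pipeline context.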
av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
                                 const VAAPIEncodeType *type)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    AVVAAPIFramesContext *recon_hwctx = NULL;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    enum AVPixelFormat recon_format;
    VAStatus vas;
    int err, i;

    if (!avctx->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the encoding device.\n");
        return AVERROR(EINVAL);
    }

    ctx->codec = type;
    ctx->codec_options = ctx->codec_options_data;

    ctx->priv_data = av_mallocz(type->priv_data_size);
    if (!ctx->priv_data) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->input_frames_ref = av_buffer_ref(avctx->hw_frames_ctx);
    if (!ctx->input_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->device = (AVHWDeviceContext*)ctx->device_ref->data;
    ctx->hwctx = ctx->device->hwctx;

    err = ctx->codec->init(avctx);
    if (err < 0)
        goto fail;

    vas = vaCreateConfig(ctx->hwctx->display,
                         ctx->va_profile, ctx->va_entrypoint,
                         ctx->config_attributes, ctx->nb_config_attributes,
                         &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "configuration: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    // Probably we can use the input surface format as the surface format
    // of the reconstructed frames.  If not, we just pick the first (only?)
    // format in the valid list and hope that it all works.
    recon_format = AV_PIX_FMT_NONE;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->input_frames->sw_format ==
                constraints->valid_sw_formats[i]) {
                recon_format = ctx->input_frames->sw_format;
                break;
            }
        }
        if (recon_format == AV_PIX_FMT_NONE)
            recon_format = constraints->valid_sw_formats[i];
    } else {
        // No idea what to use; copy input format.
        recon_format = ctx->input_frames->sw_format;
    }
    av_log(avctx, AV_LOG_DEBUG, "Using %s as format of "
           "reconstructed frames.\n", av_get_pix_fmt_name(recon_format));

    if (ctx->aligned_width  < constraints->min_width  ||
        ctx->aligned_height < constraints->min_height ||
        ctx->aligned_width  > constraints->max_width ||
        ctx->aligned_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support encoding at "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->aligned_width, ctx->aligned_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);

    ctx->recon_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->recon_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->recon_frames = (AVHWFramesContext*)ctx->recon_frames_ref->data;

    ctx->recon_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->recon_frames->sw_format = recon_format;
    ctx->recon_frames->width     = ctx->aligned_width;
    ctx->recon_frames->height    = ctx->aligned_height;
    ctx->recon_frames->initial_pool_size = ctx->nb_recon_frames;

    err = av_hwframe_ctx_init(ctx->recon_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise reconstructed "
               "frame context: %d.\n", err);
        goto fail;
    }
    recon_hwctx = ctx->recon_frames->hwctx;

    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->aligned_width, ctx->aligned_height,
                          VA_PROGRESSIVE,
                          recon_hwctx->surface_ids,
                          recon_hwctx->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    ctx->input_order  = 0;
    ctx->output_delay = avctx->max_b_frames;
    ctx->decode_delay = 1;
    ctx->output_order = - ctx->output_delay - 1;

    if (ctx->codec->sequence_params_size > 0) {
        ctx->codec_sequence_params =
            av_mallocz(ctx->codec->sequence_params_size);
        if (!ctx->codec_sequence_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    if (ctx->codec->picture_params_size > 0) {
        ctx->codec_picture_params =
            av_mallocz(ctx->codec->picture_params_size);
        if (!ctx->codec_picture_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    if (ctx->codec->init_sequence_params) {
        err = ctx->codec->init_sequence_params(avctx);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Codec sequence initialisation "
                   "failed: %d.\n", err);
            goto fail;
        }
    }

    // All I are IDR for now.
    ctx->i_per_idr = 0;
    ctx->p_per_i = ((avctx->gop_size + avctx->max_b_frames) /
                    (avctx->max_b_frames + 1));
    ctx->b_per_p = avctx->max_b_frames;

    // This should be configurable somehow.  (Needs testing on a machine
    // where it actually overlaps properly, though.)
    ctx->issue_mode = ISSUE_MODE_MAXIMISE_THROUGHPUT;

    return 0;

fail:
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    ff_vaapi_encode_close(avctx);
    return err;
}
Example #12
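A hand-rolled VDPAU bring-up (apparently a modified variant of the ffmpeg CLI's VDPAU hwaccel): the X11 display and VDPAU device are created manually, wrapped in an AVHWDeviceContext, and a fixed 1920x1080 frames context is initialised for decoding.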
static int vdpau_alloc(AVCodecContext *s)
{
    int loglevel = AV_LOG_ERROR;
    VDPAUContext *ctx;
    const char *display, *vendor;
    VdpStatus err;
    int ret;

    VdpDevice                device;
    VdpGetProcAddress       *get_proc_address;
    VdpGetInformationString *get_information_string;

    VDPAUHWDevicePriv    *device_priv = NULL;
    AVHWDeviceContext    *device_ctx;
    AVVDPAUDeviceContext *device_hwctx;
    AVHWFramesContext    *frames_ctx;
    AVBufferRef          *device_ref  = NULL;

    ctx = av_mallocz(sizeof(*ctx));
    if (!ctx)
        return AVERROR(ENOMEM);

    device_priv = av_mallocz(sizeof(*device_priv));
    if (!device_priv) {
        av_freep(&ctx);
        goto fail;
    }

    device_priv->dpy = XOpenDisplay(":0");
    if (!device_priv->dpy) {
        av_log(NULL, loglevel, "Cannot open the X11 display %s.\n",
               XDisplayName(":0"));
        goto fail;
    }
    display = XDisplayString(device_priv->dpy);

    err = vdp_device_create_x11(device_priv->dpy, XDefaultScreen(device_priv->dpy),
                                &device, &get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(NULL, loglevel, "VDPAU device creation on X11 display %s failed.\n",
               display);
        goto fail;
    }

#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = get_proc_address(device, id, &tmp);                                   \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(NULL, loglevel, "Error getting the " #id " callback.\n");        \
        goto fail;                                                              \
    }                                                                           \
    result = tmp;                                                               \
} while (0)

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         device_priv->device_destroy);

    device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
    if (!device_ref)
        goto fail;
    device_ctx                     = (AVHWDeviceContext*)device_ref->data;
    device_hwctx                   = device_ctx->hwctx;
    device_ctx->user_opaque        = device_priv;
    device_ctx->free               = device_free;
    device_hwctx->device           = device;
    device_hwctx->get_proc_address = get_proc_address;

    device_priv = NULL;

    ret = av_hwdevice_ctx_init(device_ref);
    if (ret < 0)
        goto fail;

    ctx->hw_frames_ctx = av_hwframe_ctx_alloc(device_ref);
    if (!ctx->hw_frames_ctx)
        goto fail;
    //av_buffer_unref(&device_ref);

    frames_ctx            = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
    frames_ctx->format    = AV_PIX_FMT_VDPAU;
    frames_ctx->sw_format = s->sw_pix_fmt;
    frames_ctx->width     = 1920;
    frames_ctx->height    = 1080;

    ret = av_hwframe_ctx_init(ctx->hw_frames_ctx);
    if (ret < 0)
        goto fail;

    if (av_vdpau_bind_context(s, device, get_proc_address, 0))
        goto fail;

    s->opaque = ctx;

    return 0;

fail:
    if (device_priv) {
        if (device_priv->device_destroy)
            device_priv->device_destroy(device);
        if (device_priv->dpy)
            XCloseDisplay(device_priv->dpy);
    }
    av_freep(&device_priv);
    av_buffer_unref(&device_ref);
    vdpau_uninit(s);
    return AVERROR(EINVAL);
}
Example #13
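OBS Studio's VAAPI encoder initialisation: a VAAPI device is created from a DRM device path, a 20-surface NV12 frame pool is attached to the encoder context, and a software staging frame is allocated for uploads.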
static bool vaapi_init_codec(struct vaapi_encoder *enc, const char *path)
{
	int ret;

	ret = av_hwdevice_ctx_create(&enc->vadevice_ref, AV_HWDEVICE_TYPE_VAAPI,
			path, NULL, 0);
	if (ret < 0) {
		warn("Failed to create VAAPI device context: %s",
				av_err2str(ret));
		return false;
	}

	enc->vaframes_ref = av_hwframe_ctx_alloc(enc->vadevice_ref);
	if (!enc->vaframes_ref) {
		warn("Failed to alloc HW frames context");
		return false;
	}

	AVHWFramesContext *frames_ctx =
			(AVHWFramesContext *)enc->vaframes_ref->data;
	frames_ctx->format            = AV_PIX_FMT_VAAPI;
	frames_ctx->sw_format         = AV_PIX_FMT_NV12;
	frames_ctx->width             = enc->context->width;
	frames_ctx->height            = enc->context->height;
	frames_ctx->initial_pool_size = 20;

	ret = av_hwframe_ctx_init(enc->vaframes_ref);
	if (ret < 0) {
		warn("Failed to init HW frames context: %s", av_err2str(ret));
		return false;
	}

	/* 2. Create software frame and picture */
	enc->vframe = av_frame_alloc();
	if (!enc->vframe) {
		warn("Failed to allocate video frame");
		return false;
	}

	enc->vframe->format = enc->context->pix_fmt;
	enc->vframe->width  = enc->context->width;
	enc->vframe->height = enc->context->height;
	enc->vframe->colorspace  = enc->context->colorspace;
	enc->vframe->color_range = enc->context->color_range;

	ret = av_frame_get_buffer(enc->vframe, base_get_alignment());
	if (ret < 0) {
		warn("Failed to allocate vframe: %s", av_err2str(ret));
		return false;
	}

	/* 3. set up codec */
	enc->context->pix_fmt       = AV_PIX_FMT_VAAPI;
	enc->context->hw_frames_ctx = av_buffer_ref(enc->vaframes_ref);

	ret = avcodec_open2(enc->context, enc->vaapi, NULL);
	if (ret < 0) {
		warn("Failed to open VAAPI codec: %s", av_err2str(ret));
		return false;
	}

	enc->initialized = true;
	return true;
}
Example #14
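The VAAPI deinterlacer's output configuration: sized from the input frames context, validated against the hardware constraints, and finished by creating the processing pipeline context and scaling the output time base and frame rate for field-rate output.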
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext    *avctx = outlink->src;
    AVFilterLink      *inlink = avctx->inputs[0];
    DeintVAAPIContext    *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    ctx->output_width  = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->input_frames->sw_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
Example #15
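hwmap output configuration (a newer variant supporting device derivation and reverse mapping): depending on the input and output formats it derives a frames context, builds a reverse-mapped pool, or simply re-references the input context.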
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    AVBufferRef *device;
    const AVPixFmtDescriptor *desc;
    int err;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    av_buffer_unref(&ctx->hwframes_ref);

    device = avctx->hw_device_ctx;

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        if (ctx->derive_device_type) {
            enum AVHWDeviceType type;

            type = av_hwdevice_find_type_by_name(ctx->derive_device_type);
            if (type == AV_HWDEVICE_TYPE_NONE) {
                av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n");
                goto fail;
            }

            err = av_hwdevice_ctx_create_derived(&device, type,
                                                 hwfc->device_ref, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to created derived "
                       "device context: %d.\n", err);
                goto fail;
            }
        }

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc) {
            err = AVERROR(EINVAL);
            goto fail;
        }

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
            !ctx->reverse) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            if (!device) {
                av_log(avctx, AV_LOG_ERROR, "A device reference is "
                       "required to map to a hardware format.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                device,
                                                inlink->hw_frames_ctx, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "frames context: %d.\n", err);
                goto fail;
            }

        } else if (inlink->format == hwfc->format &&
                   (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
                   ctx->reverse) {
            // Map between two hardware formats, but do it in reverse.
            // Make a new hwframe context for the target type, and then
            // overwrite the input hwframe context with a derived context
            // mapped from that back to the source type.
            AVBufferRef *source;
            AVHWFramesContext *frames;

            ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            frames = (AVHWFramesContext*)ctx->hwframes_ref->data;

            frames->format    = outlink->format;
            frames->sw_format = hwfc->sw_format;
            frames->width     = hwfc->width;
            frames->height    = hwfc->height;
            frames->initial_pool_size = 64;

            err = av_hwframe_ctx_init(ctx->hwframes_ref);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise "
                       "target frames context: %d.\n", err);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&source,
                                                inlink->format,
                                                hwfc->device_ref,
                                                ctx->hwframes_ref,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create "
                       "derived source frames context: %d.\n", err);
                goto fail;
            }

            // Here is the naughty bit.  This overwriting changes what
            // ff_get_video_buffer() in the previous filter returns -
            // it will now give a frame allocated here mapped back to
            // the format it expects.  If there were any additional
            // constraints on the output frames there then this may
            // break nastily.
            av_buffer_unref(&inlink->hw_frames_ctx);
            inlink->hw_frames_ctx = source;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format.  This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        if (!device) {
            av_log(avctx, AV_LOG_ERROR, "A device reference is "
                   "required to create new frames with reverse "
                   "mapping.\n");
            err = AVERROR(EINVAL);
            goto fail;
        }

        ctx->reverse = 1;

        ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for reverse mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    return 0;

fail:
    av_buffer_unref(&ctx->hwframes_ref);
    return err;
}
Example #16
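libavutil's av_hwframe_ctx_create_derived(): if the source context was itself derived from the target device, the original context is returned directly (an unmapping); otherwise a new frames context is allocated that records its source before initialisation.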
int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
                                  enum AVPixelFormat format,
                                  AVBufferRef *derived_device_ctx,
                                  AVBufferRef *source_frame_ctx,
                                  int flags)
{
    AVBufferRef   *dst_ref = NULL;
    AVHWFramesContext *dst = NULL;
    AVHWFramesContext *src = (AVHWFramesContext*)source_frame_ctx->data;
    int ret;

    if (src->internal->source_frames) {
        AVHWFramesContext *src_src =
            (AVHWFramesContext*)src->internal->source_frames->data;
        AVHWDeviceContext *dst_dev =
            (AVHWDeviceContext*)derived_device_ctx->data;

        if (src_src->device_ctx == dst_dev) {
            // This is actually an unmapping, so we just return a
            // reference to the source frame context.
            *derived_frame_ctx =
                av_buffer_ref(src->internal->source_frames);
            if (!*derived_frame_ctx) {
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            return 0;
        }
    }

    dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
    if (!dst_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    dst = (AVHWFramesContext*)dst_ref->data;

    dst->format    = format;
    dst->sw_format = src->sw_format;
    dst->width     = src->width;
    dst->height    = src->height;

    dst->internal->source_frames = av_buffer_ref(source_frame_ctx);
    if (!dst->internal->source_frames) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    ret = av_hwframe_ctx_init(dst_ref);
    if (ret)
        goto fail;

    *derived_frame_ctx = dst_ref;
    return 0;

fail:
    if (dst)
        av_buffer_unref(&dst->internal->source_frames);
    av_buffer_unref(&dst_ref);
    return ret;
}
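A brief usage sketch for the function above, assuming ocl_device_ref is an OpenCL device derived from the device owning vaapi_frames_ref (both names are illustrative):

AVBufferRef *derived_frames = NULL;
int ret = av_hwframe_ctx_create_derived(&derived_frames, AV_PIX_FMT_OPENCL,
                                        ocl_device_ref, vaapi_frames_ref, 0);
if (ret < 0)
    return ret;
/* frames obtained from derived_frames now map the original VAAPI surfaces */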
Example #17
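An earlier hwmap variant of the same logic: hardware-to-hardware mapping through a derived context, hardware-to-software mapping by re-referencing the input context, or backward mapping through a newly created frames context.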
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    const AVPixFmtDescriptor *desc;
    int err;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc)
            return AVERROR(EINVAL);

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
            if (!ctx->hwdevice_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                ctx->hwdevice_ref,
                                                inlink->hw_frames_ctx, 0);
            if (err < 0)
                goto fail;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwdevice_ref = NULL;

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format.  This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        ctx->map_backwards = 1;

        ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
        if (!ctx->hwdevice_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        ctx->hwframes_ref = av_hwframe_ctx_alloc(ctx->hwdevice_ref);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for backward mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    return 0;

fail:
    av_buffer_unref(&ctx->hwframes_ref);
    av_buffer_unref(&ctx->hwdevice_ref);
    return err;
}
Example #18
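Common VDPAU decoder initialisation: if the caller supplied only a device context, a frames context is created on the fly; the code then queries surface and decoder capabilities before creating the VDPAU decoder.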
int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                         int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;

    vdctx->width            = UINT32_MAX;
    vdctx->height           = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

    if (hwctx) {
        hwctx->reset            = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        AVHWFramesContext *frames_ctx = NULL;
        AVVDPAUDeviceContext *dev_ctx;

        // We assume the hw_frames_ctx always survives until ff_vdpau_common_uninit
        // is called. This holds true as the user is not allowed to touch
        // hw_device_ctx, or hw_frames_ctx after get_format (and ff_get_format
        // itself also uninits before unreffing hw_frames_ctx).
        if (avctx->hw_frames_ctx) {
            frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        } else if (avctx->hw_device_ctx) {
            int ret;

            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx);
            if (!avctx->hw_frames_ctx)
                return AVERROR(ENOMEM);

            frames_ctx            = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
            frames_ctx->format    = AV_PIX_FMT_VDPAU;
            frames_ctx->sw_format = avctx->sw_pix_fmt;
            frames_ctx->width     = avctx->coded_width;
            frames_ctx->height    = avctx->coded_height;

            ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
            if (ret < 0) {
                av_buffer_unref(&avctx->hw_frames_ctx);
                return ret;
            }
        }

        if (!frames_ctx) {
            av_log(avctx, AV_LOG_ERROR, "A hardware frames context is "
                   "required for VDPAU decoding.\n");
            return AVERROR(EINVAL);
        }

        dev_ctx = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status = decoder_query_caps(vdctx->device, profile, &supported,
                                    &max_level, &max_mb,
                                    &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}
Example #19
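QtAV's FFmpeg video encoder open routine: when the encoder's first pixel format is a hardware format, a device and frames context are created and a software upload format is negotiated from the device constraints; otherwise a plain software pixel format is selected.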
bool VideoEncoderFFmpegPrivate::open()
{
    nb_encoded = 0LL;
    if (codec_name.isEmpty()) {
        // copy ctx from muxer by copyAVCodecContext
        AVCodec *codec = avcodec_find_encoder(avctx->codec_id);
        AV_ENSURE_OK(avcodec_open2(avctx, codec, &dict), false);
        return true;
    }
    AVCodec *codec = avcodec_find_encoder_by_name(codec_name.toUtf8().constData());
    if (!codec) {
        const AVCodecDescriptor* cd = avcodec_descriptor_get_by_name(codec_name.toUtf8().constData());
        if (cd) {
            codec = avcodec_find_encoder(cd->id);
        }
    }
    if (!codec) {
        qWarning() << "Can not find encoder for codec " << codec_name;
        return false;
    }
    if (avctx) {
        avcodec_free_context(&avctx);
        avctx = 0;
    }
    avctx = avcodec_alloc_context3(codec);
    avctx->width = width; // coded_width works, why?
    avctx->height = height;
    // reset format_used to user defined format. important to update default format if format is invalid
    format_used = VideoFormat::Format_Invalid;
    AVPixelFormat fffmt = (AVPixelFormat)format.pixelFormatFFmpeg();
    if (codec->pix_fmts && format.isValid()) {
        for (int i = 0; codec->pix_fmts[i] != AVPixelFormat(-1); ++i) {
            if (fffmt == codec->pix_fmts[i]) {
                format_used = format.pixelFormat();
                break;
            }
        }
    }
    //avctx->sample_aspect_ratio =
    AVPixelFormat hwfmt = AVPixelFormat(-1);
    if (codec->pix_fmts && (av_pix_fmt_desc_get(codec->pix_fmts[0])->flags & AV_PIX_FMT_FLAG_HWACCEL))
        hwfmt = codec->pix_fmts[0];
    bool use_hwctx = false;
    if (hwfmt != AVPixelFormat(-1)) {
#ifdef HAVE_AVHWCTX
        const AVHWDeviceType dt = fromHWAName(codec_name.section(QChar('_'), -1).toUtf8().constData());
        if (dt != AVHWDeviceType(-1)) {
            use_hwctx = true;
            avctx->pix_fmt = hwfmt;
            hw_device_ctx = NULL;
            AV_ENSURE(av_hwdevice_ctx_create(&hw_device_ctx, dt, hwdev.toLatin1().constData(), NULL, 0), false);
            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx);
            if (!avctx->hw_frames_ctx) {
                qWarning("Failed to create hw frame context for '%s'", codec_name.toLatin1().constData());
                return false;
            }
            // get sw formats
            const void *hwcfg = NULL;
            AVHWFramesConstraints *constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx, hwcfg);
            // constraints may be NULL; av_hwframe_constraints_free() below is NULL-safe
            const AVPixelFormat* in_fmts = constraints ? constraints->valid_sw_formats : NULL;
            AVPixelFormat sw_fmt = AVPixelFormat(-1);
            if (in_fmts) {
                sw_fmt = in_fmts[0];
                while (*in_fmts != AVPixelFormat(-1)) {
                    if (*in_fmts == fffmt)
                        sw_fmt = *in_fmts;
                    sw_fmts.append(*in_fmts);
                    ++in_fmts;
                }
            } else {
                sw_fmt = QTAV_PIX_FMT_C(YUV420P);
            }
            av_hwframe_constraints_free(&constraints);
            format_used = VideoFormat::pixelFormatFromFFmpeg(sw_fmt);
            // encoder surface pool parameters
            AVHWFramesContext* hwfs = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
            hwfs->format = hwfmt; // must be the same as avctx->pix_fmt
            hwfs->sw_format = sw_fmt; // if not set, vaapi picks the last entry of valid_sw_formats, which is wrong for vaGetImage/vaDeriveImage; nvenc always needs sw_format
            // hw upload parameters. The encoder's hw_frames_ctx is only used for parameter checking and is never initialized, so we allocate a separate one for uploading.
            hwframes_ref = av_hwframe_ctx_alloc(hw_device_ctx);
            if (!hwframes_ref) {
                qWarning("Failed to create hw frame context for uploading '%s'", codec_name.toLatin1().constData());
            } else {
                hwframes = (AVHWFramesContext*)hwframes_ref->data;
                hwframes->format = hwfmt;
            }
        }
#endif //HAVE_AVHWCTX
    }

    if (!use_hwctx) { // no hw device (videotoolbox, wrong device name etc.), or old ffmpeg
        // TODO: check frame is hw frame
        if (hwfmt == AVPixelFormat(-1)) { // sw enc
            if (format_used == VideoFormat::Format_Invalid) { // requested format is not supported by the sw encoder
                if (codec->pix_fmts) { //pix_fmts[0] is always a sw format here
                    qDebug("use first supported pixel format '%d' for sw encoder", codec->pix_fmts[0]);
                    format_used = VideoFormat::pixelFormatFromFFmpeg((int)codec->pix_fmts[0]);
                }
            }
        } else {
            if (format_used == VideoFormat::Format_Invalid) { // requested format is not supported by the hw encoder
                // check the table before reading pix_fmts[1]; pix_fmts[0] is the hw format here
                if (codec->pix_fmts && codec->pix_fmts[1] != AVPixelFormat(-1)) {
                    qDebug("use first supported sw pixel format '%d' for hw encoder", codec->pix_fmts[1]);
                    format_used = VideoFormat::pixelFormatFromFFmpeg(codec->pix_fmts[1]);
                }
            }
        }
        if (format_used == VideoFormat::Format_Invalid) {
            qWarning("fallback to yuv420p");
            format_used = VideoFormat::Format_YUV420P;
        }
        avctx->pix_fmt = (AVPixelFormat)VideoFormat::pixelFormatToFFmpeg(format_used);
    }
    if (frame_rate > 0)
        avctx->time_base = av_d2q(1.0/frame_rate, frame_rate*1001.0+2);
    else
        avctx->time_base = av_d2q(1.0/VideoEncoder::defaultFrameRate(), VideoEncoder::defaultFrameRate()*1001.0+2);
    qDebug("size: %dx%d tbc: %f=%d/%d", width, height, av_q2d(avctx->time_base), avctx->time_base.num, avctx->time_base.den);
    avctx->bit_rate = bit_rate;
    //AVDictionary *dict = 0;
    if(avctx->codec_id == QTAV_CODEC_ID(H264)) {
        avctx->gop_size = 10;
        //avctx->max_b_frames = 3;//h264
        av_dict_set(&dict, "preset", "fast", 0); //x264
        av_dict_set(&dict, "tune", "zerolatency", 0);  //x264
        //av_dict_set(&dict, "profile", "main", 0); // conflict with vaapi (int values)
    }
    if(avctx->codec_id == AV_CODEC_ID_HEVC){
        av_dict_set(&dict, "preset", "ultrafast", 0);
        av_dict_set(&dict, "tune", "zero-latency", 0);
    }
    if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        av_dict_set(&dict, "strict", "-2", 0); // mpeg2 arbitrary fps
    }
    applyOptionsForContext();
    AV_ENSURE_OK(avcodec_open2(avctx, codec, &dict), false);
    // from mpv ao_lavc
    const int buffer_size = qMax<int>(qMax<int>(width*height*6+200, FF_MIN_BUFFER_SIZE), sizeof(AVPicture));//??
    buffer.resize(buffer_size);
    return true;
}
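
Example #19 allocates hwframes_ref for uploading but never shows the upload itself. Here is a minimal sketch of that step, assuming hwframes_ref has had width, height and sw_format filled in and av_hwframe_ctx_init() has been called on it; upload_frame() is a hypothetical helper, not part of QtAV.

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Allocate a hardware surface from the frames pool and copy the
 * software frame into it, as an encoder would do before handing
 * the frame to a hw encoder. */
static AVFrame *upload_frame(AVBufferRef *hwframes_ref, const AVFrame *sw_frame)
{
    AVFrame *hw_frame = av_frame_alloc();
    if (!hw_frame)
        return NULL;
    if (av_hwframe_get_buffer(hwframes_ref, hw_frame, 0) < 0 ||
        av_hwframe_transfer_data(hw_frame, sw_frame, 0) < 0) {
        av_frame_free(&hw_frame);
        return NULL;
    }
    av_frame_copy_props(hw_frame, sw_frame); /* keep pts and metadata */
    return hw_frame;
}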
Example #20
0
/* create the QSV session */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink                 *inlink = avctx->inputs[0];
    AVFilterLink                *outlink = avctx->outputs[0];
    AVQSVFramesContext  *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error getting the session handle\n");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing a session for scaling\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}
Example #21
0
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext     *hwctx = ctx->hwctx;
    QSVFramesContext           *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx   = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    child_frames_ctx->width             = FFALIGN(ctx->width, 16);
    child_frames_ctx->height            = FFALIGN(ctx->height, 16);

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref       = child_frames_ref;
    child_frames_ref          = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
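
The child-context plumbing above is internal to hwcontext_qsv; public code can get a similar effect with av_hwframe_ctx_create_derived(). A minimal sketch, assuming a QSV frames context and a VAAPI device that refer to the same hardware (derive_vaapi_frames() is a hypothetical helper):

#include <libavutil/hwcontext.h>

/* Expose the surfaces of an existing QSV frames context through a
 * VAAPI frames context without copying; AV_HWFRAME_MAP_DIRECT
 * requests a zero-copy mapping. */
static int derive_vaapi_frames(AVBufferRef **vaapi_frames_ref,
                               AVBufferRef *vaapi_device_ref,
                               AVBufferRef *qsv_frames_ref)
{
    return av_hwframe_ctx_create_derived(vaapi_frames_ref, AV_PIX_FMT_VAAPI,
                                         vaapi_device_ref, qsv_frames_ref,
                                         AV_HWFRAME_MAP_DIRECT);
}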
Example #22
0
/**
    \fn configureContext
    \brief pre-open
*/
bool ADM_ffVAEncHEVC::configureContext(void)
{
    ADM_info("Configuring context for VAAPI encoder\n");
    ADM_info("Our display: %#x\n",admLibVA::getDisplay());

    _context->bit_rate=VaEncHevcSettings.bitrate*1000;
    _context->rc_max_rate=VaEncHevcSettings.max_bitrate*1000;
    _context->max_b_frames=VaEncHevcSettings.bframes;
    _context->pix_fmt =AV_PIX_FMT_VAAPI;

#define CLEARTEXT(x) char buf[AV_ERROR_MAX_STRING_SIZE]={0}; av_make_error_string(buf,AV_ERROR_MAX_STRING_SIZE,x);
    hwDeviceCtx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
    if(!hwDeviceCtx)
    {
        ADM_error("Cannot allocate hw device context.\n");
        return false;
    }

    AVHWDeviceContext *hwctx = (AVHWDeviceContext *)hwDeviceCtx->data;
    AVVAAPIDeviceContext *vactx = (AVVAAPIDeviceContext *)hwctx->hwctx;
    vactx->display = admLibVA::getDisplay();

    int err = av_hwdevice_ctx_init(hwDeviceCtx);
    if(err)
    {
        CLEARTEXT(err)
        ADM_warning("Cannot initialize VAAPI hwdevice (%d, %s)\n",err,buf);
        return false;
    }

    AVBufferRef *hwFramesRef = NULL;
    AVHWFramesContext *hwFramesCtx = NULL;
    hwFramesRef = av_hwframe_ctx_alloc(hwDeviceCtx);
    if(!hwFramesRef)
    {
        ADM_error("Cannot create VAAPI frame context.\n");
        return false;
    }
    hwFramesCtx=(AVHWFramesContext*)(hwFramesRef->data);
    hwFramesCtx->format=AV_PIX_FMT_VAAPI;
    hwFramesCtx->sw_format=AV_PIX_FMT_NV12;
    hwFramesCtx->width=source->getInfo()->width;
    hwFramesCtx->height=source->getInfo()->height;
    hwFramesCtx->initial_pool_size=20;
    err = av_hwframe_ctx_init(hwFramesRef);
    if(err<0)
    {
        CLEARTEXT(err)
        ADM_error("Cannot initialize VAAPI frame context (%d, %s)\n",err,buf);
        av_buffer_unref(&hwFramesRef);
        return false;
    }
    _context->hw_frames_ctx = av_buffer_ref(hwFramesRef);
    if(!_context->hw_frames_ctx)
    {
        ADM_error("hw_frames_ctx is NULL!\n");
        return false;
    }
    av_buffer_unref(&hwFramesRef);
    return true;
}
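
The manual alloc/fill/init sequence above is needed because Avidemux reuses a VADisplay it already owns. When no pre-existing display must be shared, av_hwdevice_ctx_create() performs allocation and initialization in one call; a minimal sketch (create_vaapi_device() is a hypothetical helper):

#include <libavutil/hwcontext.h>

/* Open a default VAAPI device; passing NULL as the device string lets
 * libavutil pick a display / DRM render node on its own. */
static AVBufferRef *create_vaapi_device(void)
{
    AVBufferRef *dev = NULL;
    if (av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_VAAPI, NULL, NULL, 0) < 0)
        return NULL;
    return dev;
}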
Example #23
0
static int dxva2_create_decoder(AVCodecContext *s)
{
    InputStream  *ist = s->opaque;
    int loglevel = (ist->hwaccel_id == HWACCEL_AUTO) ? AV_LOG_VERBOSE : AV_LOG_ERROR;
    DXVA2Context *ctx = ist->hwaccel_ctx;
    struct dxva_context *dxva_ctx = s->hwaccel_context;
    GUID *guid_list = NULL;
    unsigned guid_count = 0, i, j;
    GUID device_guid = GUID_NULL;
    const D3DFORMAT surface_format = s->sw_pix_fmt == AV_PIX_FMT_YUV420P10 ?
                                     MKTAG('P', '0', '1', '0') : MKTAG('N', 'V', '1', '2');
    D3DFORMAT target_format = 0;
    DXVA2_VideoDesc desc = { 0 };
    DXVA2_ConfigPictureDecode config;
    HRESULT hr;
    int surface_alignment, num_surfaces;
    int ret;

    AVDXVA2FramesContext *frames_hwctx;
    AVHWFramesContext *frames_ctx;

    hr = IDirectXVideoDecoderService_GetDecoderDeviceGuids(ctx->decoder_service, &guid_count, &guid_list);
    if (FAILED(hr)) {
        av_log(NULL, loglevel, "Failed to retrieve decoder device GUIDs\n");
        goto fail;
    }

    for (i = 0; dxva2_modes[i].guid; i++) {
        D3DFORMAT *target_list = NULL;
        unsigned target_count = 0;
        const dxva2_mode *mode = &dxva2_modes[i];
        if (mode->codec != s->codec_id)
            continue;

        for (j = 0; j < guid_count; j++) {
            if (IsEqualGUID(mode->guid, &guid_list[j]))
                break;
        }
        if (j == guid_count)
            continue;

        hr = IDirectXVideoDecoderService_GetDecoderRenderTargets(ctx->decoder_service, mode->guid, &target_count, &target_list);
        if (FAILED(hr)) {
            continue;
        }
        for (j = 0; j < target_count; j++) {
            const D3DFORMAT format = target_list[j];
            if (format == surface_format) {
                target_format = format;
                break;
            }
        }
        CoTaskMemFree(target_list);
        if (target_format) {
            device_guid = *mode->guid;
            break;
        }
    }
    CoTaskMemFree(guid_list);

    if (IsEqualGUID(&device_guid, &GUID_NULL)) {
        av_log(NULL, loglevel, "No decoder device for codec found\n");
        goto fail;
    }

    desc.SampleWidth  = s->coded_width;
    desc.SampleHeight = s->coded_height;
    desc.Format       = target_format;

    ret = dxva2_get_decoder_configuration(s, &device_guid, &desc, &config);
    if (ret < 0) {
        goto fail;
    }

    /* decoding MPEG-2 requires additional alignment on some Intel GPUs,
       but it causes issues for H.264 on certain AMD GPUs... */
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        surface_alignment = 32;
    /* the HEVC DXVA2 spec asks for 128 pixel aligned surfaces to ensure
       all coding features have enough room to work with */
    else if  (s->codec_id == AV_CODEC_ID_HEVC)
        surface_alignment = 128;
    else
        surface_alignment = 16;

    /* 4 base work surfaces */
    num_surfaces = 4;

    /* add surfaces based on number of possible refs */
    if (s->codec_id == AV_CODEC_ID_H264 || s->codec_id == AV_CODEC_ID_HEVC)
        num_surfaces += 16;
    else
        num_surfaces += 2;

    /* add extra surfaces for frame threading */
    if (s->active_thread_type & FF_THREAD_FRAME)
        num_surfaces += s->thread_count;

    ctx->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->hw_device_ctx);
    if (!ctx->hw_frames_ctx)
        goto fail;
    frames_ctx   = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
    frames_hwctx = frames_ctx->hwctx;

    frames_ctx->format            = AV_PIX_FMT_DXVA2_VLD;
    frames_ctx->sw_format         = s->sw_pix_fmt == AV_PIX_FMT_YUV420P10 ?
                                    AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
    frames_ctx->width             = FFALIGN(s->coded_width, surface_alignment);
    frames_ctx->height            = FFALIGN(s->coded_height, surface_alignment);
    frames_ctx->initial_pool_size = num_surfaces;

    frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;

    ret = av_hwframe_ctx_init(ctx->hw_frames_ctx);
    if (ret < 0) {
        av_log(NULL, loglevel, "Failed to initialize the HW frames context\n");
        goto fail;
    }

    hr = IDirectXVideoDecoderService_CreateVideoDecoder(ctx->decoder_service, &device_guid,
                                                        &desc, &config, frames_hwctx->surfaces,
                                                        frames_hwctx->nb_surfaces, &frames_hwctx->decoder_to_release);
    if (FAILED(hr)) {
        av_log(NULL, loglevel, "Failed to create DXVA2 video decoder\n");
        goto fail;
    }

    ctx->decoder_guid   = device_guid;
    ctx->decoder_config = config;

    dxva_ctx->cfg           = &ctx->decoder_config;
    dxva_ctx->decoder       = frames_hwctx->decoder_to_release;
    dxva_ctx->surface       = frames_hwctx->surfaces;
    dxva_ctx->surface_count = frames_hwctx->nb_surfaces;

    if (IsEqualGUID(&ctx->decoder_guid, &DXVADDI_Intel_ModeH264_E))
        dxva_ctx->workaround |= FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO;

    return 0;
fail:
    av_buffer_unref(&ctx->hw_frames_ctx);
    return AVERROR(EINVAL);
}
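
Example #23 uses the legacy dxva_context/hwaccel_context path. Under the current hwaccel API the same pool would instead be handed to the decoder from the get_format callback; a minimal sketch, where prepared_frames_ref is a hypothetical stand-in for ctx->hw_frames_ctx from the function above:

#include <libavcodec/avcodec.h>
#include <libavutil/buffer.h>

static AVBufferRef *prepared_frames_ref; /* assumed to be initialized elsewhere */

/* Pick AV_PIX_FMT_DXVA2_VLD when offered and attach the prepared
 * frames context to the codec context. */
static enum AVPixelFormat get_dxva2_format(AVCodecContext *s,
                                           const enum AVPixelFormat *fmts)
{
    for (; *fmts != AV_PIX_FMT_NONE; fmts++) {
        if (*fmts != AV_PIX_FMT_DXVA2_VLD)
            continue;
        s->hw_frames_ctx = av_buffer_ref(prepared_frames_ref);
        if (!s->hw_frames_ctx)
            break;
        return AV_PIX_FMT_DXVA2_VLD;
    }
    return AV_PIX_FMT_NONE; /* no usable format: decoding falls back or fails */
}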