Example no. 1
0
/**
 * Pop the oldest pending output surface, lock its bitstream, and copy the
 * encoded data into pkt, tagging timestamps, keyframe flag and (legacy)
 * coded_frame picture type.
 *
 * NVENC calls return NVENCSTATUS where success is 0 and error codes are
 * POSITIVE, so they must be checked with "!= 0" — the previous "< 0" tests
 * could never fire and silently accepted failed lock/unlock calls.
 *
 * @param avctx codec context holding the NVENCContext in priv_data
 * @param pkt   destination packet; (re)allocated to the bitstream size
 * @return 0 on success, a negative AVERROR code on failure
 */
static int nvenc_get_frame(AVCodecContext *avctx, AVPacket *pkt)
{
    NVENCContext *ctx               = avctx->priv_data;
    NV_ENCODE_API_FUNCTION_LIST *nv = &ctx->nvel.nvenc_funcs;
    NV_ENC_LOCK_BITSTREAM params    = { 0 };
    NVENCOutputSurface *out         = NULL;
    int ret;

    ret = nvenc_dequeue_surface(ctx->pending, &out);
    if (ret)
        return ret;

    params.version         = NV_ENC_LOCK_BITSTREAM_VER;
    params.outputBitstream = out->out;

    /* NVENCSTATUS: 0 == success, errors are positive. */
    ret = nv->nvEncLockBitstream(ctx->nvenc_ctx, &params);
    if (ret != 0)
        return AVERROR_UNKNOWN;

    ret = ff_alloc_packet(pkt, params.bitstreamSizeInBytes);
    if (ret < 0) {
        /* Do not leave the bitstream locked on the error path. */
        nv->nvEncUnlockBitstream(ctx->nvenc_ctx, out->out);
        return ret;
    }

    memcpy(pkt->data, params.bitstreamBufferPtr, pkt->size);

    ret = nv->nvEncUnlockBitstream(ctx->nvenc_ctx, out->out);
    if (ret != 0)
        return AVERROR_UNKNOWN;

    /* Surface pair is free for reuse by the submission side. */
    out->busy = out->in->locked = 0;

    ret = nvenc_set_timestamp(ctx, &params, pkt);
    if (ret < 0)
        return ret;

    switch (params.pictureType) {
    case NV_ENC_PIC_TYPE_IDR:
        pkt->flags |= AV_PKT_FLAG_KEY;
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        /* fallthrough: an IDR picture is also an intra picture */
    case NV_ENC_PIC_TYPE_INTRA_REFRESH:
    case NV_ENC_PIC_TYPE_I:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        break;
    case NV_ENC_PIC_TYPE_P:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        break;
    case NV_ENC_PIC_TYPE_B:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
        break;
    case NV_ENC_PIC_TYPE_BI:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_BI;
        break;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    default:
        break;
    }

    return 0;
}
// Encode one frame with NVENC.
//
// Converts the source image (any supported fcPixelFormat) to NV12, uploads
// it into the encoder's input buffer, runs a synchronous encode, and appends
// the resulting bitstream to dst.data.
//
// Every NVENC call returns NVENCSTATUS where 0 means success; the previous
// version ignored every status and would memcpy through an invalid
// bufferDataPtr (or read an invalid bitstreamBufferPtr) after a failed lock.
//
// Returns false if the encoder is not initialized or any NVENC call fails.
bool fcH264EncoderNVIDIA::encode(fcH264Frame& dst, const void *image, fcPixelFormat fmt, fcTime timestamp, bool force_keyframe)
{
    if (!isValid()) { return false; }

    dst.timestamp = timestamp;

    // convert image to NV12 (the buffer format we hand to NVENC)
    AnyToNV12(m_nv12_image, m_rgba_image, image, fmt, m_conf.width, m_conf.height);
    NV12Data data = m_nv12_image.data();

    NVENCSTATUS stat;

    // upload image to input buffer
    {
        NV_ENC_LOCK_INPUT_BUFFER lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
        lock_params.inputBuffer = m_input.inputBuffer;
        stat = nvenc.nvEncLockInputBuffer(m_encoder, &lock_params);
        if (stat != 0) { return false; } // 0 == NV_ENC_SUCCESS; never write through an unlocked pointer
        // NOTE(review): copies the full NV12 image starting at the Y plane —
        // assumes Y and UV planes are contiguous in m_nv12_image; confirm.
        memcpy(lock_params.bufferDataPtr, data.y, m_nv12_image.size());
        stat = nvenc.nvEncUnlockInputBuffer(m_encoder, m_input.inputBuffer);
        if (stat != 0) { return false; }
    }

    NV_ENC_PIC_PARAMS params = { 0 };
    params.version = NV_ENC_PIC_PARAMS_VER;
    params.inputBuffer = m_input.inputBuffer;
    params.outputBitstream = m_output.bitstreamBuffer;
    params.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12;
    params.inputWidth = m_conf.width;
    params.inputHeight = m_conf.height;
    params.completionEvent = 0;           // synchronous: no completion event
    params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
    params.encodePicFlags = 0;
    if (force_keyframe) {
        params.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEINTRA;
    }
    params.inputTimeStamp = to_usec(timestamp);
    params.inputDuration = to_usec(1.0 / m_conf.target_framerate);

    // encode!
    // NOTE(review): treats any non-success status (including a hypothetical
    // NEED_MORE_INPUT) as failure; this path submits one synchronous frame
    // per call, so output is expected to be ready immediately — confirm.
    stat = nvenc.nvEncEncodePicture(m_encoder, &params);
    if (stat != 0) { return false; }

    // retrieve encoded data
    {
        NV_ENC_LOCK_BITSTREAM lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_BITSTREAM_VER;
        lock_params.outputBitstream = m_output.bitstreamBuffer;

        stat = nvenc.nvEncLockBitstream(m_encoder, &lock_params);
        if (stat != 0) { return false; } // bitstreamBufferPtr is only valid while locked

        dst.data.append((char*)lock_params.bitstreamBufferPtr, lock_params.bitstreamSizeInBytes);
        dst.gatherNALInformation();

        stat = nvenc.nvEncUnlockBitstream(m_encoder, m_output.bitstreamBuffer);
        if (stat != 0) { return false; }
    }

    return true;
}