Example #1
/* Creates and maps VA buffer */
gboolean
vaapi_create_buffer (VADisplay dpy, VAContextID ctx, int type, guint size,
    gconstpointer buf, VABufferID * buf_id_ptr, gpointer * mapped_data)
{
  VABufferID buf_id;
  VAStatus status;
  gpointer data = (gpointer) buf;

  status = vaCreateBuffer (dpy, ctx, type, size, 1, data, &buf_id);
  if (!vaapi_check_status (status, "vaCreateBuffer()"))
    return FALSE;

  if (mapped_data) {
    data = vaapi_map_buffer (dpy, buf_id);
    if (!data)
      goto error;
    *mapped_data = data;
  }

  *buf_id_ptr = buf_id;
  return TRUE;

error:
  vaapi_destroy_buffer (dpy, &buf_id);
  return FALSE;
}
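A minimal usage sketch (not from the original source): it assumes a valid VADisplay/VAContextID pair and a vaapi_unmap_buffer() counterpart to vaapi_map_buffer().

/* Hypothetical caller: create an H.264 slice-parameter buffer with no
 * initial data, fill it through the mapped pointer, then unmap it. */
static gboolean
fill_slice_params (VADisplay dpy, VAContextID ctx, VABufferID * buf_id_ptr)
{
  VASliceParameterBufferH264 *slice = NULL;

  if (!vaapi_create_buffer (dpy, ctx, VASliceParameterBufferType,
          sizeof (VASliceParameterBufferH264), NULL, buf_id_ptr,
          (gpointer *) & slice))
    return FALSE;

  slice->slice_data_size = 0;   /* fill in the real slice fields here */
  vaapi_unmap_buffer (dpy, *buf_id_ptr);        /* assumed unmap helper */
  return TRUE;
}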
Example #2
int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      int type,
                                      const void *data,
                                      size_t size)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    VAStatus vas;
    VABufferID buffer;

    av_assert0(pic->nb_param_buffers + 1 <= MAX_PARAM_BUFFERS);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         type, size, 1, (void*)data, &buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
               "buffer (type %d): %d (%s).\n",
               type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    pic->param_buffers[pic->nb_param_buffers++] = buffer;

    av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes) "
           "is %#x.\n", type, size, buffer);
    return 0;
}
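A sketch of the matching teardown (assumed here, not part of the excerpt): the IDs collected in param_buffers are typically destroyed once the picture has been submitted.

/* Sketch: destroy the parameter buffers gathered above after
 * vaEndPicture(), mirroring how they were collected. */
static void vaapi_decode_destroy_param_buffers(AVCodecContext *avctx,
                                               VAAPIDecodePicture *pic)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    int i;

    for (i = 0; i < pic->nb_param_buffers; i++)
        vaDestroyBuffer(ctx->hwctx->display, pic->param_buffers[i]);
    pic->nb_param_buffers = 0;
}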
Example #3
/* Creates and maps VA buffer */
bool
vaapiCreateBuffer(VADisplay dpy,
                  VAContextID ctx,
                  int type,
                  uint32_t size,
                  const void *buf,
                  VABufferID * bufIdPtr, void **mappedData)
{
    VABufferID bufId;
    VAStatus status;
    void *data = (void *) buf;

    status =
        vaCreateBuffer(dpy, ctx, (VABufferType) type, size, 1, data,
                       &bufId);
    if (!checkVaapiStatus(status, "vaCreateBuffer()"))
        return false;

    if (mappedData) {
        data = vaapiMapBuffer(dpy, bufId);
        if (!data)
            goto error;
        *mappedData = data;
    }

    *bufIdPtr = bufId;
    return true;

  error:
    vaapiDestroyBuffer(dpy, &bufId);
    return false;
}
Example #4
File: vaapi.c Project: AndiDog/FFmpeg
static void *alloc_buffer(struct vaapi_context *vactx, int type, unsigned int size, uint32_t *buf_id)
{
    void *data = NULL;

    *buf_id = 0;
    if (vaCreateBuffer(vactx->display, vactx->context_id,
                       type, size, 1, NULL, buf_id) == VA_STATUS_SUCCESS)
        vaMapBuffer(vactx->display, *buf_id, &data);

    return data;
}
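Because alloc_buffer() passes NULL data to vaCreateBuffer() and returns the mapped pointer, a hypothetical caller (not in the original file) fills the mapping and unmaps before rendering:

/* Sketch: allocate a picture-parameter buffer, copy the parameters into
 * the mapped memory, then unmap so the driver can consume the buffer in
 * vaRenderPicture(). */
static int commit_pic_param(struct vaapi_context *vactx,
                            const VAPictureParameterBufferH264 *src,
                            uint32_t *buf_id)
{
    void *data = alloc_buffer(vactx, VAPictureParameterBufferType,
                              sizeof(*src), buf_id);
    if (!data)
        return -1;
    memcpy(data, src, sizeof(*src));
    vaUnmapBuffer(vactx->display, *buf_id);
    return 0;
}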
Example #5
File: vaapi.c Project: AndiDog/FFmpeg
int ff_vaapi_commit_slices(struct vaapi_context *vactx)
{
    VABufferID *slice_buf_ids;
    VABufferID slice_param_buf_id, slice_data_buf_id;

    if (vactx->slice_count == 0)
        return 0;

    slice_buf_ids =
        av_fast_realloc(vactx->slice_buf_ids,
                        &vactx->slice_buf_ids_alloc,
                        (vactx->n_slice_buf_ids + 2) * sizeof(slice_buf_ids[0]));
    if (!slice_buf_ids)
        return -1;
    vactx->slice_buf_ids = slice_buf_ids;

    slice_param_buf_id = 0;
    if (vaCreateBuffer(vactx->display, vactx->context_id,
                       VASliceParameterBufferType,
                       vactx->slice_param_size,
                       vactx->slice_count, vactx->slice_params,
                       &slice_param_buf_id) != VA_STATUS_SUCCESS)
        return -1;
    vactx->slice_count = 0;

    slice_data_buf_id = 0;
    if (vaCreateBuffer(vactx->display, vactx->context_id,
                       VASliceDataBufferType,
                       vactx->slice_data_size,
                       1, (void *)vactx->slice_data,
                       &slice_data_buf_id) != VA_STATUS_SUCCESS)
        return -1;
    vactx->slice_data = NULL;
    vactx->slice_data_size = 0;

    slice_buf_ids[vactx->n_slice_buf_ids++] = slice_param_buf_id;
    slice_buf_ids[vactx->n_slice_buf_ids++] = slice_data_buf_id;
    return 0;
}
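Once committed, the accumulated slice buffer IDs are submitted in a single call; a sketch of that step, assumed from the fields used above:

/* Sketch: hand the committed slice buffers to the driver and reset the
 * counter for the next batch. */
static int render_committed_slices(struct vaapi_context *vactx)
{
    if (vactx->n_slice_buf_ids == 0)
        return 0;
    if (vaRenderPicture(vactx->display, vactx->context_id,
                        vactx->slice_buf_ids,
                        vactx->n_slice_buf_ids) != VA_STATUS_SUCCESS)
        return -1;
    vactx->n_slice_buf_ids = 0;
    return 0;
}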
Example #6
File: vaapi.c Project: coog009/myproject
static void *alloc_buffer(VAAPIContext *vaapi, int type, unsigned int size, VABufferID *buf_id)
{
    VAStatus status;
    void *data = NULL;

    *buf_id = 0;
    status = vaCreateBuffer(vaapi->display, vaapi->context_id,
                            type, size, 1, NULL, buf_id);
    if (!vaapi_check_status(status, "vaCreateBuffer()"))
        return NULL;

    status = vaMapBuffer(vaapi->display, *buf_id, &data);
    if (!vaapi_check_status(status, "vaMapBuffer()"))
        return NULL;

    return data;
}
Example #7
Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) {

    VAStatus vaStatus = VA_STATUS_SUCCESS;
    uint32_t sliceHeight;
    uint32_t sliceHeightInMB;

    LOG_V("Begin\n\n");

    sliceHeight = mComParams.resolution.height;
    sliceHeight += 15;
    sliceHeight &= (~15);
    sliceHeightInMB = sliceHeight / 16;

    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncSliceParameterBufferType,
            sizeof(VAEncSliceParameterBuffer),
            1, NULL, &mSliceParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    VAEncSliceParameterBuffer *sliceParams;
    vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
    CHECK_VA_STATUS_RETURN("vaMapBuffer");

    // starting MB row number for this slice
    sliceParams->start_row_number = 0;
    // slice height measured in MB
    sliceParams->slice_height = sliceHeightInMB;
    sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
    sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0;

    LOG_V("======h263 slice params======\n");
    LOG_I("start_row_number = %d\n", (int) sliceParams->start_row_number);
    LOG_I("slice_height_in_mb = %d\n", (int) sliceParams->slice_height);
    LOG_I("slice.is_intra = %d\n", (int) sliceParams->slice_flags.bits.is_intra);

    vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");

    LOG_V("end\n");
    return ENCODE_SUCCESS;
}
Example #8
Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) {

    VAStatus vaStatus = VA_STATUS_SUCCESS;
    VAEncPictureParameterBufferH263 h263PictureParams = {};

    LOG_V( "Begin\n\n");

    // set picture params for HW
    if(mAutoReference == false){
        h263PictureParams.reference_picture = task->ref_surface;
        h263PictureParams.reconstructed_picture = task->rec_surface;
    }else {
        h263PictureParams.reference_picture = mAutoRefSurfaces[0];
        h263PictureParams.reconstructed_picture = mAutoRefSurfaces[1];
    }

    h263PictureParams.coded_buf = task->coded_buffer;
    h263PictureParams.picture_width = mComParams.resolution.width;
    h263PictureParams.picture_height = mComParams.resolution.height;
    h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;

    LOG_V("======h263 picture params======\n");
    LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture);
    LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture);
    LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf);
//    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
    LOG_I( "picture_width = %d\n", h263PictureParams.picture_width);
    LOG_I( "picture_height = %d\n",h263PictureParams.picture_height);
    LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type);

    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncPictureParameterBufferType,
            sizeof(h263PictureParams),
            1,&h263PictureParams,
            &mPicParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf , 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");

    LOG_V( "end\n");
    return ENCODE_SUCCESS;
}
Example #9
Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) {

    VAStatus vaStatus = VA_STATUS_SUCCESS;
    VAEncSequenceParameterBufferH263 h263SequenceParam = {};
    uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
    uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;

    LOG_V( "Begin\n\n");

    //set up the sequence params for HW
    h263SequenceParam.bits_per_second = mComParams.rcParams.bitRate;
    h263SequenceParam.frame_rate =
            (unsigned int) (frameRateNum + frameRateDenom / 2) / frameRateDenom; // rounded to the nearest integer, as the driver requires
    h263SequenceParam.initial_qp = mComParams.rcParams.initQP;
    h263SequenceParam.min_qp = mComParams.rcParams.minQP;
    h263SequenceParam.intra_period = mComParams.intraPeriod;

    //h263_seq_param.fixed_vop_rate = 30;

    LOG_V("===h263 sequence params===\n");
    LOG_I( "bitrate = %d\n", h263SequenceParam.bits_per_second);
    LOG_I( "frame_rate = %d\n", h263SequenceParam.frame_rate);
    LOG_I( "initial_qp = %d\n", h263SequenceParam.initial_qp);
    LOG_I( "min_qp = %d\n", h263SequenceParam.min_qp);
    LOG_I( "intra_period = %d\n\n", h263SequenceParam.intra_period);

    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncSequenceParameterBufferType,
            sizeof(h263SequenceParam),
            1, &h263SequenceParam,
            &mSeqParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");

    LOG_V( "end\n");
    return ENCODE_SUCCESS;
}
Example #10
Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {

    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the first slice lost
        }
        if (mDecodingFrame) {
            // interlace content, complete decoding the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlace content, top field may be valid only after the second field is parsed
            mAcquiredBuffer->pictureOrder= picParam->CurrPic.TopFieldOrderCnt;
        }

        // Check there is no reference frame loss before decoding a frame

        // Update  the reference frames and surface IDs for DPB and current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");

        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    // find which naluinfo is correlated to current slice
    int naluIndex = 0;
    uint32_t accumulatedHeaderLen = 0;
    uint32_t headerLen = 0;
    for (; naluIndex < mMetadata.naluNumber; naluIndex++)  {
        headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen;
        if (headerLen == 0) {
            WTRACE("lenght of current NAL unit is 0.");
            continue;
        }
        accumulatedHeaderLen += STARTCODE_PREFIX_LEN;
        if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) {
            break;
        }
        accumulatedHeaderLen += headerLen;
    }

    if (sliceData->slice_offset != accumulatedHeaderLen) {
        WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d", sliceData->slice_offset, accumulatedHeaderLen);
    }

    sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen;
    uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset;
    uint32_t slice_offset_shift =  sliceOffset % 16;
    sliceParam->slice_data_offset += slice_offset_shift;
    sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit
    // offset points to first byte of NAL unit

    if (mInputBuffer != NULL) {
        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceDataBufferType,
            sliceData->slice_size,  //Slice size
            1,                      // num_elements
            mInputBuffer + sliceOffset - slice_offset_shift,
            &bufferIDs[bufferIDCount]);
    } else {
        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAProtectedSliceDataBufferType,
            sliceData->slice_size, //size
            1,        //num_elements
            (uint8_t*)sliceOffset, // IMR offset
            &bufferIDs[bufferIDCount]);
    }
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}
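The 16-byte alignment arithmetic above is the subtle part; this illustrative helper (hypothetical, mirroring the lines in the function) isolates it:

/* The slice data start is rounded down to a 16-byte boundary, so the
 * bytes below the boundary (the "shift") are added back to
 * slice_data_offset and the total size is padded up to the next
 * 16-byte multiple. */
static uint32_t aligned_slice_size(uint32_t slice_data_size, uint32_t slice_offset)
{
    uint32_t shift = slice_offset % 16;
    return (slice_data_size + shift + 0xF) & ~0xF;
}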
Example #11
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext   *avctx = inlink->dst;
    AVFilterLink    *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward  references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                           ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x      = 0,
            .y      = 0,
            .width  = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            vaapi_proc_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags   = VA_FRAME_PICTURE;

        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters     = &ctx->filter_buffer;
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters     = NULL;
            params.num_filters = 0;
        }

        vas = vaBeginPicture(ctx->hwctx->display,
                             ctx->va_context, output_surface);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }

        vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                             VAProcPipelineParameterBufferType,
                             sizeof(params), 1, &params, &params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }
        av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
               params_id);

        vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                              &params_id, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }

        vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_render;
        }

        if (ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    ctx->va_config     = VA_INVALID_ID;
    ctx->va_context    = VA_INVALID_ID;
    ctx->filter_buffer = VA_INVALID_ID;
    ctx->valid_ids = 1;

    return 0;
}
Example #12
	static VABufferID create(VAContextID context, VABufferType type, T *param, int num = 1) {
		VABufferID id = VA_INVALID_ID;
		if (vaCreateBuffer(VaApi::glx(), context, type, sizeof(T), num, param, &id) != VA_STATUS_SUCCESS)
			return VA_INVALID_ID;
		return id;
	}
Example #13
static int scale_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    ScaleVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VAProcPipelineParameterBuffer params;
    VABufferID params_id;
    VARectangle input_region;
    VAStatus vas;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
           input_surface);

    output_frame = av_frame_alloc();
    if (!output_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame.");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_hwframe_get_buffer(ctx->output_frames_ref, output_frame, 0);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get surface for "
               "output: %d.\n", err);
        goto fail;
    }

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
           output_surface);

    memset(&params, 0, sizeof(params));

    // If there were top/left cropping, it could be taken into
    // account here.
    input_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = input_frame->width,
        .height = input_frame->height,
    };

    params.surface = input_surface;
    params.surface_region = &input_region;
    params.surface_color_standard =
        vaapi_proc_colour_standard(input_frame->colorspace);

    params.output_region = 0;
    params.output_background_color = 0xff000000;
    params.output_color_standard = params.surface_color_standard;

    params.pipeline_flags = 0;
    params.filter_flags = VA_FILTER_SCALING_HQ;

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(params), 1, &params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(ctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    av_frame_copy_props(output_frame, input_frame);
    av_frame_free(&input_frame);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output_frame->format),
           output_frame->width, output_frame->height, output_frame->pts);

    return ff_filter_frame(outlink, output_frame);

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_frame_free(&input_frame);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int scale_vaapi_init(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    if (ctx->output_format_string) {
        ctx->output_format = av_get_pix_fmt(ctx->output_format_string);
        if (ctx->output_format == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Invalid output format.\n");
            return AVERROR(EINVAL);
        }
    } else {
        // Use the input format once that is configured.
        ctx->output_format = AV_PIX_FMT_NONE;
    }

    return 0;
}
Example #14
mfxStatus vaapiFrameAllocator::AllocImpl(mfxFrameAllocRequest *request, mfxFrameAllocResponse *response)
{
    mfxStatus mfx_res = MFX_ERR_NONE;
    VAStatus  va_res  = VA_STATUS_SUCCESS;
    unsigned int va_fourcc = 0;
    VASurfaceID* surfaces = NULL;
    VASurfaceAttrib attrib;
    vaapiMemId *vaapi_mids = NULL, *vaapi_mid = NULL;
    mfxMemId* mids = NULL;
    mfxU32 fourcc = request->Info.FourCC;
    mfxU16 surfaces_num = request->NumFrameSuggested, numAllocated = 0, i = 0;
    bool bCreateSrfSucceeded = false;

    memset(response, 0, sizeof(mfxFrameAllocResponse));

    va_fourcc = ConvertMfxFourccToVAFormat(fourcc);
    if (!va_fourcc || ((VA_FOURCC_NV12 != va_fourcc) &&
                       (VA_FOURCC_YV12 != va_fourcc) &&
                       (VA_FOURCC_YUY2 != va_fourcc) &&
                       (VA_FOURCC_ARGB != va_fourcc) &&
                       (VA_FOURCC_P208 != va_fourcc)))
    {
        return MFX_ERR_MEMORY_ALLOC;
    }
    if (!surfaces_num)
    {
        return MFX_ERR_MEMORY_ALLOC;
    }
    if (MFX_ERR_NONE == mfx_res)
    {
        surfaces = (VASurfaceID*)calloc(surfaces_num, sizeof(VASurfaceID));
        vaapi_mids = (vaapiMemId*)calloc(surfaces_num, sizeof(vaapiMemId));
        mids = (mfxMemId*)calloc(surfaces_num, sizeof(mfxMemId));
        if ((NULL == surfaces) || (NULL == vaapi_mids) || (NULL == mids)) mfx_res = MFX_ERR_MEMORY_ALLOC;
    }
    if (MFX_ERR_NONE == mfx_res)
    {
        if( VA_FOURCC_P208 != va_fourcc )
        {
            attrib.type = VASurfaceAttribPixelFormat;
            attrib.value.type = VAGenericValueTypeInteger;
            attrib.value.value.i = va_fourcc;
            attrib.flags = VA_SURFACE_ATTRIB_SETTABLE;
            va_res = vaCreateSurfaces(m_dpy,
                                    VA_RT_FORMAT_YUV420,
                                    request->Info.Width, request->Info.Height,
                                    surfaces,
                                    surfaces_num,
                                    &attrib, 1);
            mfx_res = va_to_mfx_status(va_res);
            bCreateSrfSucceeded = (MFX_ERR_NONE == mfx_res);
        }
        else
        {
            VAContextID context_id = request->reserved[0];

            int codedbuf_size = static_cast<int>((request->Info.Width * request->Info.Height) * 400LL / (16 * 16));

            for (numAllocated = 0; numAllocated < surfaces_num; numAllocated++)
            {
                VABufferID coded_buf;

                va_res = vaCreateBuffer(m_dpy,
                                      context_id,
                                      VAEncCodedBufferType,
                                      codedbuf_size,
                                      1,
                                      NULL,
                                      &coded_buf);
                mfx_res = va_to_mfx_status(va_res);
                if (MFX_ERR_NONE != mfx_res) break;
                surfaces[numAllocated] = coded_buf;
            }
        }
    }
    if (MFX_ERR_NONE == mfx_res)
    {
        for (i = 0; i < surfaces_num; ++i)
        {
            vaapi_mid = &(vaapi_mids[i]);
            vaapi_mid->m_fourcc = fourcc;
            vaapi_mid->m_surface = &(surfaces[i]);
            mids[i] = vaapi_mid;
        }
    }
    if (MFX_ERR_NONE == mfx_res)
    {
        response->mids = mids;
        response->NumFrameActual = surfaces_num;
    }
    else // i.e. MFX_ERR_NONE != mfx_res
    {
        response->mids = NULL;
        response->NumFrameActual = 0;
        if (VA_FOURCC_P208 != va_fourcc)
        {
            if (bCreateSrfSucceeded) vaDestroySurfaces(m_dpy, surfaces, surfaces_num);
        }
        else
        {
            for (i = 0; i < numAllocated; i++)
                vaDestroyBuffer(m_dpy, surfaces[i]);
        }
        if (mids)
        {
            free(mids);
            mids = NULL;
        }
        if (vaapi_mids) { free(vaapi_mids); vaapi_mids = NULL; }
        if (surfaces) { free(surfaces); surfaces = NULL; }
    }
    return mfx_res;
}
Example #15
static int vaapi_encode_make_packed_header(AVCodecContext *avctx,
                                           VAAPIEncodePicture *pic,
                                           int type, char *data, size_t bit_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAStatus vas;
    VABufferID param_buffer, data_buffer;
    VAEncPackedHeaderParameterBuffer params = {
        .type = type,
        .bit_length = bit_len,
        .has_emulation_bytes = 1,
    };

    av_assert0(pic->nb_param_buffers + 2 <= MAX_PARAM_BUFFERS);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAEncPackedHeaderParameterBufferType,
                         sizeof(params), 1, &params, &param_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer "
               "for packed header (type %d): %d (%s).\n",
               type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    pic->param_buffers[pic->nb_param_buffers++] = param_buffer;

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAEncPackedHeaderDataBufferType,
                         (bit_len + 7) / 8, 1, data, &data_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create data buffer "
               "for packed header (type %d): %d (%s).\n",
               type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    pic->param_buffers[pic->nb_param_buffers++] = data_buffer;

    av_log(avctx, AV_LOG_DEBUG, "Packed header buffer (%d) is %#x/%#x "
           "(%zu bits).\n", type, param_buffer, data_buffer, bit_len);
    return 0;
}

static int vaapi_encode_make_param_buffer(AVCodecContext *avctx,
                                          VAAPIEncodePicture *pic,
                                          int type, char *data, size_t len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAStatus vas;
    VABufferID buffer;

    av_assert0(pic->nb_param_buffers + 1 <= MAX_PARAM_BUFFERS);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         type, len, 1, data, &buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer "
               "(type %d): %d (%s).\n", type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    pic->param_buffers[pic->nb_param_buffers++] = buffer;

    av_log(avctx, AV_LOG_DEBUG, "Param buffer (%d) is %#x.\n",
           type, buffer);
    return 0;
}
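A hedged usage sketch (assumed, not from the excerpt): a codec backend would emit a packed sequence header through the helper above roughly like this.

/* Sketch: write header bits into a scratch buffer and attach them as a
 * packed header (parameter + data buffer pair). */
static int emit_packed_sequence_header(AVCodecContext *avctx,
                                       VAAPIEncodePicture *pic)
{
    char data[MAX_PARAM_BUFFER_SIZE];
    size_t bit_len = 8 * sizeof(data);

    /* ...fill `data` with the header bitstream and set bit_len... */

    return vaapi_encode_make_packed_header(avctx, pic,
                                           VAEncPackedHeaderSequence,
                                           data, bit_len);
}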
Example #16
static int vaapi_encode_issue(AVCodecContext *avctx,
                              VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeSlice *slice;
    VAStatus vas;
    int err, i;
    char data[MAX_PARAM_BUFFER_SIZE];
    size_t bit_len;

    av_log(avctx, AV_LOG_DEBUG, "Issuing encode for pic %"PRId64"/%"PRId64" "
           "as type %s.\n", pic->display_order, pic->encode_order,
           picture_type_name[pic->type]);
    if (pic->nb_refs == 0) {
        av_log(avctx, AV_LOG_DEBUG, "No reference pictures.\n");
    } else {
        av_log(avctx, AV_LOG_DEBUG, "Refers to:");
        for (i = 0; i < pic->nb_refs; i++) {
            av_log(avctx, AV_LOG_DEBUG, " %"PRId64"/%"PRId64,
                   pic->refs[i]->display_order, pic->refs[i]->encode_order);
        }
        av_log(avctx, AV_LOG_DEBUG, ".\n");
    }

    av_assert0(pic->input_available && !pic->encode_issued);
    for (i = 0; i < pic->nb_refs; i++) {
        av_assert0(pic->refs[i]);
        // If we are serialised then the references must have already
        // completed.  If not, they must have been issued but need not
        // have completed yet.
        if (ctx->issue_mode == ISSUE_MODE_SERIALISE_EVERYTHING)
            av_assert0(pic->refs[i]->encode_complete);
        else
            av_assert0(pic->refs[i]->encode_issued);
    }

    av_log(avctx, AV_LOG_DEBUG, "Input surface is %#x.\n", pic->input_surface);

    pic->recon_image = av_frame_alloc();
    if (!pic->recon_image) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_hwframe_get_buffer(ctx->recon_frames_ref, pic->recon_image, 0);
    if (err < 0) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    pic->recon_surface = (VASurfaceID)(uintptr_t)pic->recon_image->data[3];
    av_log(avctx, AV_LOG_DEBUG, "Recon surface is %#x.\n", pic->recon_surface);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAEncCodedBufferType,
                         MAX_OUTPUT_BUFFER_SIZE, 1, 0,
                         &pic->output_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create bitstream "
               "output buffer: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(ENOMEM);
        goto fail;
    }
    av_log(avctx, AV_LOG_DEBUG, "Output buffer is %#x.\n",
           pic->output_buffer);

    if (ctx->codec->picture_params_size > 0) {
        pic->codec_picture_params = av_malloc(ctx->codec->picture_params_size);
        if (!pic->codec_picture_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        memcpy(pic->codec_picture_params, ctx->codec_picture_params,
               ctx->codec->picture_params_size);
    } else {
        av_assert0(!ctx->codec_picture_params);
    }

    pic->nb_param_buffers = 0;

    if (pic->encode_order == 0) {
        // Global parameter buffers are set on the first picture only.

        for (i = 0; i < ctx->nb_global_params; i++) {
            err = vaapi_encode_make_param_buffer(avctx, pic,
                                                 VAEncMiscParameterBufferType,
                                                 (char*)ctx->global_params[i],
                                                 ctx->global_params_size[i]);
            if (err < 0)
                goto fail;
        }
    }

    if (pic->type == PICTURE_TYPE_IDR && ctx->codec->init_sequence_params) {
        err = vaapi_encode_make_param_buffer(avctx, pic,
                                             VAEncSequenceParameterBufferType,
                                             ctx->codec_sequence_params,
                                             ctx->codec->sequence_params_size);
        if (err < 0)
            goto fail;
    }

    if (ctx->codec->init_picture_params) {
        err = ctx->codec->init_picture_params(avctx, pic);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to initialise picture "
                   "parameters: %d.\n", err);
            goto fail;
        }
        err = vaapi_encode_make_param_buffer(avctx, pic,
                                             VAEncPictureParameterBufferType,
                                             pic->codec_picture_params,
                                             ctx->codec->picture_params_size);
        if (err < 0)
            goto fail;
    }

    if (pic->type == PICTURE_TYPE_IDR) {
        if (ctx->codec->write_sequence_header) {
            bit_len = 8 * sizeof(data);
            err = ctx->codec->write_sequence_header(avctx, data, &bit_len);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write per-sequence "
                       "header: %d.\n", err);
                goto fail;
            }
            err = vaapi_encode_make_packed_header(avctx, pic,
                                                  ctx->codec->sequence_header_type,
                                                  data, bit_len);
            if (err < 0)
                goto fail;
        }
    }

    if (ctx->codec->write_picture_header) {
        bit_len = 8 * sizeof(data);
        err = ctx->codec->write_picture_header(avctx, pic, data, &bit_len);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to write per-picture "
                   "header: %d.\n", err);
            goto fail;
        }
        err = vaapi_encode_make_packed_header(avctx, pic,
                                              ctx->codec->picture_header_type,
                                              data, bit_len);
        if (err < 0)
            goto fail;
    }

    if (ctx->codec->write_extra_buffer) {
        for (i = 0;; i++) {
            size_t len = sizeof(data);
            int type;
            err = ctx->codec->write_extra_buffer(avctx, pic, i, &type,
                                                 data, &len);
            if (err == AVERROR_EOF)
                break;
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write extra "
                       "buffer %d: %d.\n", i, err);
                goto fail;
            }

            err = vaapi_encode_make_param_buffer(avctx, pic, type,
                                                 data, len);
            if (err < 0)
                goto fail;
        }
    }

    av_assert0(pic->nb_slices <= MAX_PICTURE_SLICES);
    for (i = 0; i < pic->nb_slices; i++) {
        slice = av_mallocz(sizeof(*slice));
        if (!slice) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        pic->slices[i] = slice;

        if (ctx->codec->slice_params_size > 0) {
            slice->codec_slice_params = av_mallocz(ctx->codec->slice_params_size);
            if (!slice->codec_slice_params) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }

        if (ctx->codec->init_slice_params) {
            err = ctx->codec->init_slice_params(avctx, pic, slice);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initalise slice "
                       "parameters: %d.\n", err);
                goto fail;
            }
        }

        if (ctx->codec->write_slice_header) {
            bit_len = 8 * sizeof(data);
            err = ctx->codec->write_slice_header(avctx, pic, slice,
                                                 data, &bit_len);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write per-slice "
                       "header: %d.\n", err);
                goto fail;
            }
            err = vaapi_encode_make_packed_header(avctx, pic,
                                                  ctx->codec->slice_header_type,
                                                  data, bit_len);
            if (err < 0)
                goto fail;
        }

        if (ctx->codec->init_slice_params) {
            err = vaapi_encode_make_param_buffer(avctx, pic,
                                                 VAEncSliceParameterBufferType,
                                                 slice->codec_slice_params,
                                                 ctx->codec->slice_params_size);
            if (err < 0)
                goto fail;
        }
    }

    vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
                         pic->input_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to begin picture encode issue: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          pic->param_buffers, pic->nb_param_buffers);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to upload encode parameters: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to end picture encode issue: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_at_end;
    }

    pic->encode_issued = 1;

    if (ctx->issue_mode == ISSUE_MODE_SERIALISE_EVERYTHING)
        return vaapi_encode_wait(avctx, pic);
    else
        return 0;

fail_with_picture:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    for(i = 0; i < pic->nb_param_buffers; i++)
        vaDestroyBuffer(ctx->hwctx->display, pic->param_buffers[i]);
fail_at_end:
    av_freep(&pic->codec_picture_params);
    av_frame_free(&pic->recon_image);
    return err;
}
Example #17
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			/* Create a dummy filter (some drivers/API versions crash without any filter) */
			VAProcFilterParameterBufferBase none_params = { VAProcFilterNone };
			if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof none_params, 1, &none_params, &vpp_buffers[ VAProcFilterNone ] ) != VA_STATUS_SUCCESS )
				vpp_buffers[ VAProcFilterNone ] = VA_INVALID_ID;
			/* Search for the deinterlacing filter */
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion compensated", ErrorLog | LogOnce ); // tr: "Unsupported deinterlacing algorithm"
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion adaptive", ErrorLog | LogOnce ); // tr: "Unsupported deinterlacing algorithm"
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, 0 };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_buffers[ VAProcFilterDeinterlacing ] ) != VA_STATUS_SUCCESS )
								vpp_buffers[ VAProcFilterDeinterlacing ] = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone ) //Show error only when filter is required
		QMPlay2Core.log( "VA-API :: " + tr( "Nie można otworzyć filtrów obrazu" ), ErrorLog | LogOnce );
	clr_vpp();
#endif
}
Example #18
void VAApiWriter::init_vpp()
{
#ifdef HAVE_VPP
	use_vpp = true;
	if
	(
		vaCreateConfig( VADisp, ( VAProfile )-1, VAEntrypointVideoProc, NULL, 0, &config_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateContext( VADisp, config_vpp, 0, 0, 0, NULL, 0, &context_vpp ) == VA_STATUS_SUCCESS &&
		vaCreateSurfaces( &id_vpp, 1 )
	)
	{
		unsigned num_filters = VAProcFilterCount;
		VAProcFilterType filters[ VAProcFilterCount ];
		if ( vaQueryVideoProcFilters( VADisp, context_vpp, filters, &num_filters ) != VA_STATUS_SUCCESS )
			num_filters = 0;
		if ( num_filters )
		{
			if ( vpp_deint_type != VAProcDeinterlacingNone )
				for ( unsigned i = 0 ; i < num_filters ; ++i )
					if ( filters[ i ] == VAProcFilterDeinterlacing )
					{
						VAProcFilterCapDeinterlacing deinterlacing_caps[ VAProcDeinterlacingCount ];
						unsigned num_deinterlacing_caps = VAProcDeinterlacingCount;
						if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, &deinterlacing_caps, &num_deinterlacing_caps ) != VA_STATUS_SUCCESS )
							num_deinterlacing_caps = 0;
						bool vpp_deint_types[ 2 ] = { false };
						for ( unsigned j = 0 ; j < num_deinterlacing_caps ; ++j )
						{
							switch ( deinterlacing_caps[ j ].type )
							{
								case VAProcDeinterlacingMotionAdaptive:
									vpp_deint_types[ 0 ] = true;
									break;
								case VAProcDeinterlacingMotionCompensated:
									vpp_deint_types[ 1 ] = true;
									break;
								default:
									break;
							}
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionCompensated && !vpp_deint_types[ 1 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion compensated", ErrorLog | LogOnce ); // tr: "Unsupported deinterlacing algorithm"
							vpp_deint_type = VAProcDeinterlacingMotionAdaptive;
						}
						if ( vpp_deint_type == VAProcDeinterlacingMotionAdaptive && !vpp_deint_types[ 0 ] )
						{
							QMPlay2Core.log( tr( "Nie obsługiwany algorytm usuwania przeplotu" ) + " - Motion adaptive", ErrorLog | LogOnce ); // tr: "Unsupported deinterlacing algorithm"
							vpp_deint_type = VAProcDeinterlacingNone;
						}
						if ( vpp_deint_type != VAProcDeinterlacingNone )
						{
							VAProcFilterParameterBufferDeinterlacing deint_params = { VAProcFilterDeinterlacing, vpp_deint_type, VPP_TFF };
							if ( vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof deint_params, 1, &deint_params, &vpp_deint ) != VA_STATUS_SUCCESS )
								vpp_deint = VA_INVALID_ID;
						}
						break;
					}
			return;
		}
	}
	if ( vpp_deint_type != VAProcDeinterlacingNone )
		QMPlay2Core.log( tr( "Nie można otworzyć filtrów usuwających przeplot" ), ErrorLog | LogOnce ); // tr: "Cannot open deinterlacing filters"
	clr_vpp();
#endif
}
Example #19
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
                                     ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;
    params.flags     = 0;

    av_assert0(ctx->filter_buffer == VA_INVALID_ID);
    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcFilterParameterBufferType,
                         sizeof(params), 1, &params,
                         &ctx->filter_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
               "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
                                       ctx->va_context,
                                       &ctx->filter_buffer, 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}
Example #20
Decode_Status VideoDecoderVP8::decodePicture(vbp_data_vp8 *data, int32_t picIndex) {
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    Decode_Status status;
    uint32_t bufferIDCount = 0;
    VABufferID bufferIDs[5];

    vbp_picture_data_vp8 *picData = &(data->pic_data[picIndex]);
    VAPictureParameterBufferVP8 *picParams = picData->pic_parms;

    status = setReference(picParams);
    CHECK_STATUS("setReference");

    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");
    // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
    mDecodingFrame = true;

    vaStatus = vaCreateBuffer(
                   mVADisplay,
                   mVAContext,
                   VAPictureParameterBufferType,
                   sizeof(VAPictureParameterBufferVP8),
                   1,
                   picParams,
                   &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
                   mVADisplay,
                   mVAContext,
                   VAProbabilityBufferType,
                   sizeof(VAProbabilityDataBufferVP8),
                   1,
                   data->prob_data,
                   &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateProbabilityBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
                   mVADisplay,
                   mVAContext,
                   VAIQMatrixBufferType,
                   sizeof(VAIQMatrixBufferVP8),
                   1,
                   data->IQ_matrix_buf,
                   &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
    bufferIDCount++;

    /* Here picData->num_slices is always equal to 1 */
    for (uint32_t i = 0; i < picData->num_slices; i++) {
        vaStatus = vaCreateBuffer(
                       mVADisplay,
                       mVAContext,
                       VASliceParameterBufferType,
                       sizeof(VASliceParameterBufferVP8),
                       1,
                       &(picData->slc_data[i].slc_parms),
                       &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
                       mVADisplay,
                       mVAContext,
                       VASliceDataBufferType,
                       picData->slc_data[i].slice_size, //size
                       1,        //num_elements
                       picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
                       &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceDataBuffer");
        bufferIDCount++;
    }

    vaStatus = vaRenderPicture(
                   mVADisplay,
                   mVAContext,
                   bufferIDs,
                   bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    vaStatus = vaEndPicture(mVADisplay, mVAContext);
    mDecodingFrame = false;
    CHECK_VA_STATUS("vaEndPicture");

    return DECODE_SUCCESS;
}
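The example relies on CHECK_VA_STATUS / CHECK_STATUS macros defined elsewhere in the decoder. A minimal sketch of what such an early-return check typically looks like; the ETRACE logger and the DECODE_DRIVER_FAIL return code are assumptions, and the real project's macro may log differently or clean up more state:

    /* Hedged sketch: early-return error check assumed by the example above. */
    #define CHECK_VA_STATUS(FUNC)                                   \
        if (vaStatus != VA_STATUS_SUCCESS) {                        \
            ETRACE(FUNC " failed. vaStatus = 0x%x", vaStatus);      \
            return DECODE_DRIVER_FAIL;                              \
        }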
Example #21
static void h264_decode_frame(int f_width, int f_height, char *framedata, int framesize, int slice_type)
{
    VAStatus va_status;

    DebugLog(("%s: called for frame of %d bytes (%dx%d) slice_type=%d\n", __FUNCTION__, framesize, f_width, f_height, slice_type));

    /* Initialize decode pipeline if necessary */
    if ( (f_width > cur_width) || (f_height > cur_height) ) {
        if (va_dpy != NULL)
            h264_cleanup_decoder();
        cur_width = f_width;
        cur_height = f_height;

        h264_init_decoder(f_width, f_height);
        rfbClientLog("%s: decoder initialized\n", __FUNCTION__);
    }

    /* Decode frame */
    static VAPictureH264 va_picture_h264, va_old_picture_h264;

    /* The server should always send an I-frame when a new client connects
     * or when the resolution of the framebuffer changes, but we check
     * just in case.
     */
    if ( (slice_type != SLICE_TYPE_I) && (num_frames == 0) ) {
        rfbClientLog("First frame is not an I frame !!! Skipping!!!\n");
        return;
    }

    DebugLog(("%s: frame_id=%d va_surface_id[%d]=0x%x field_order_count=%d\n", __FUNCTION__, frame_id, sid, va_surface_id[sid], field_order_count));

    va_picture_h264.picture_id = va_surface_id[sid];
    va_picture_h264.frame_idx  = frame_id;
    va_picture_h264.flags = 0;
    va_picture_h264.BottomFieldOrderCnt = field_order_count;
    va_picture_h264.TopFieldOrderCnt = field_order_count;

    /* Set up picture parameter buffer */
    if (va_pic_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferH264), 1, NULL, &va_pic_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(PicParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAPictureParameterBufferH264 *pic_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_pic_param_buf_id[sid], (void **)&pic_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(PicParam)");

    SetVAPictureParameterBufferH264(pic_param_buf, f_width, f_height);
    memcpy(&pic_param_buf->CurrPic, &va_picture_h264, sizeof(VAPictureH264));

    if (slice_type == SLICE_TYPE_P) {
        memcpy(&pic_param_buf->ReferenceFrames[0], &va_old_picture_h264, sizeof(VAPictureH264));
        pic_param_buf->ReferenceFrames[0].flags = 0;
    }
    else if (slice_type != SLICE_TYPE_I) {
        rfbClientLog("Frame type %d not supported!!!\n");
        return;
    }
    pic_param_buf->frame_num = frame_id;

    va_status = vaUnmapBuffer(va_dpy, va_pic_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(PicParam)");

    /* Set up IQ matrix buffer */
    if (va_mat_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferH264), 1, NULL, &va_mat_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(IQMatrix)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAIQMatrixBufferH264 *iq_matrix_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_mat_param_buf_id[sid], (void **)&iq_matrix_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(IQMatrix)");

    static const unsigned char m_MatrixBufferH264[]= {
        /* ScalingList4x4[6][16] */
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        /* ScalingList8x8[2][64] */
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00
    };

    memcpy(iq_matrix_buf, m_MatrixBufferH264, sizeof(m_MatrixBufferH264)); /* 6*16 + 2*64 = 224 bytes */
    va_status = vaUnmapBuffer(va_dpy, va_mat_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(IQMatrix)");

    VABufferID buffer_ids[2];
    buffer_ids[0] = va_pic_param_buf_id[sid];
    buffer_ids[1] = va_mat_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    /* Set up slice parameter buffer */
    if (va_sp_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceParameterBufferType, sizeof(VASliceParameterBufferH264), 1, NULL, &va_sp_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VASliceParameterBufferH264 *slice_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_sp_param_buf_id[sid], (void **)&slice_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceParam)");

    static int t2_first = 1;
    if (slice_type == SLICE_TYPE_I) {
        SetVASliceParameterBufferH264_Intra(slice_param_buf, t2_first);
        t2_first = 0;
    } else {
        SetVASliceParameterBufferH264(slice_param_buf);
        memcpy(&slice_param_buf->RefPicList0[0], &va_old_picture_h264, sizeof(VAPictureH264));
        slice_param_buf->RefPicList0[0].flags = 0;
    }
    slice_param_buf->slice_data_bit_offset = 0;
    slice_param_buf->slice_data_size = framesize;

    va_status = vaUnmapBuffer(va_dpy, va_sp_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceParam)");
    CHECK_SURF(va_surface_id[sid]);

    /* Set up slice data buffer and copy H.264 encoded data */
    if (va_d_param_buf_id[sid] == VA_INVALID_ID) {
        /* TODO use estimation matching framebuffer dimensions instead of this large value */
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceDataBufferType, 4177920, 1, NULL, &va_d_param_buf_id[sid]); /* 1080p size */
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceData)");
    }

    char *slice_data_buf;
    va_status = vaMapBuffer(va_dpy, va_d_param_buf_id[sid], (void **)&slice_data_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceData)");
    memcpy(slice_data_buf, framedata, framesize);

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaUnmapBuffer(va_dpy, va_d_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceData)");

    buffer_ids[0] = va_sp_param_buf_id[sid];
    buffer_ids[1] = va_d_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaEndPicture(va_dpy, va_context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    /* Prepare next one... */
    int sid_new = (sid + 1) % SURFACE_NUM;
    DebugLog(("%s: new Surface ID = %d\n", __FUNCTION__, sid_new));
    va_status = vaBeginPicture(va_dpy, va_context_id, va_surface_id[sid_new]);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    /* Get decoded data */
    va_status = vaSyncSurface(va_dpy, va_surface_id[sid]);
    CHECK_VASTATUS(va_status, "vaSyncSurface");
    CHECK_SURF(va_surface_id[sid]);

    curr_surface = va_surface_id[sid];

    sid = sid_new;

    field_order_count += 2;
    ++frame_id;
    if (frame_id > 15) {
        frame_id = 0;
    }

    ++num_frames;

    memcpy(&va_old_picture_h264, &va_picture_h264, sizeof(VAPictureH264));
}
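After vaSyncSurface() the decoded pixels live in curr_surface; the example leaves readback to the caller. A minimal sketch of one way to map the surface into CPU memory with standard libva calls (plane layout handling is elided):

    /* Hedged sketch: map the decoded surface for CPU access via vaDeriveImage().
     * Drivers without derive support need vaCreateImage() + vaGetImage() instead. */
    VAImage image;
    void *pixels = NULL;
    if (vaDeriveImage(va_dpy, curr_surface, &image) == VA_STATUS_SUCCESS) {
        if (vaMapBuffer(va_dpy, image.buf, &pixels) == VA_STATUS_SUCCESS) {
            /* pixels + image.offsets[n] addresses plane n (pitch image.pitches[n]) */
            vaUnmapBuffer(va_dpy, image.buf);
        }
        vaDestroyImage(va_dpy, image.image_id);
    }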
Example #22
int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      const void *params_data,
                                      size_t params_size,
                                      const void *slice_data,
                                      size_t slice_size)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    VAStatus vas;
    int index;

    av_assert0(pic->nb_slices <= pic->slices_allocated);
    if (pic->nb_slices == pic->slices_allocated) {
        if (pic->slices_allocated > 0)
            pic->slices_allocated *= 2;
        else
            pic->slices_allocated = 64;

        pic->slice_buffers =
            av_realloc_array(pic->slice_buffers,
                             pic->slices_allocated,
                             2 * sizeof(*pic->slice_buffers));
        if (!pic->slice_buffers)
            return AVERROR(ENOMEM);
    }
    av_assert0(pic->nb_slices + 1 <= pic->slices_allocated);

    index = 2 * pic->nb_slices;

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VASliceParameterBufferType,
                         params_size, 1, (void*)params_data,
                         &pic->slice_buffers[index]);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
               "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    av_log(avctx, AV_LOG_DEBUG, "Slice %d param buffer (%zu bytes) "
           "is %#x.\n", pic->nb_slices, params_size,
           pic->slice_buffers[index]);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VASliceDataBufferType,
                         slice_size, 1, (void*)slice_data,
                         &pic->slice_buffers[index + 1]);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
               "data buffer (size %zu): %d (%s).\n",
               slice_size, vas, vaErrorStr(vas));
        vaDestroyBuffer(ctx->hwctx->display,
                        pic->slice_buffers[index]);
        return AVERROR(EIO);
    }

    av_log(avctx, AV_LOG_DEBUG, "Slice %d data buffer (%zu bytes) "
           "is %#x.\n", pic->nb_slices, slice_size,
           pic->slice_buffers[index + 1]);

    ++pic->nb_slices;
    return 0;
}
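Each successful call appends an interleaved pair of buffer IDs, so a matching teardown has to walk 2 * nb_slices entries. A minimal sketch of that cleanup; the function name is an assumption, and the real codec context also frees the slice_buffers array itself when the picture is released:

    /* Hedged sketch: destroy the param/data buffer pairs created above. */
    static void vaapi_decode_free_slice_buffers(VAAPIDecodeContext *ctx,
                                                VAAPIDecodePicture *pic)
    {
        int i;
        for (i = 0; i < 2 * pic->nb_slices; i++)
            vaDestroyBuffer(ctx->hwctx->display, pic->slice_buffers[i]);
        pic->nb_slices = 0;
    }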
Example #23
static
VdpStatus
softVdpDecoderRender_h264(VdpDecoder decoder, VdpDecoderData *decoderData,
                          VdpVideoSurfaceData *dstSurfData, VdpPictureInfo const *picture_info,
                          uint32_t bitstream_buffer_count,
                          VdpBitstreamBuffer const *bitstream_buffers)
{
    VdpDeviceData *deviceData = decoderData->device;
    VADisplay va_dpy = deviceData->va_dpy;
    VAStatus status;
    VdpStatus vs, err_code;
    VdpPictureInfoH264 const *vdppi = (void *)picture_info;
    uint8_t *merged_bitstream = NULL;   /* merged Annex-B bitstream, freed at quit: */

    // TODO: figure out where to get level
    uint32_t level = 41;

    // preparing picture parameters and IQ matrix
    VABufferID pic_param_buf, iq_matrix_buf;
    VAPictureParameterBufferH264 pic_param;
    VAIQMatrixBufferH264 iq_matrix;

    vs = h264_translate_reference_frames(dstSurfData, decoder, decoderData, &pic_param, vdppi);
    if (VDP_STATUS_OK != vs) {
        if (VDP_STATUS_RESOURCES == vs) {
            traceError("error (softVdpDecoderRender): no surfaces left in buffer\n");
            err_code = VDP_STATUS_RESOURCES;
        } else {
            err_code = VDP_STATUS_ERROR;
        }
        goto quit;
    }

    h264_translate_pic_param(&pic_param, decoderData->width, decoderData->height, vdppi, level);
    h264_translate_iq_matrix(&iq_matrix, vdppi);

    glx_context_lock();
    status = vaCreateBuffer(va_dpy, decoderData->context_id, VAPictureParameterBufferType,
        sizeof(VAPictureParameterBufferH264), 1, &pic_param, &pic_param_buf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    status = vaCreateBuffer(va_dpy, decoderData->context_id, VAIQMatrixBufferType,
        sizeof(VAIQMatrixBufferH264), 1, &iq_matrix, &iq_matrix_buf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    // send data to decoding hardware
    status = vaBeginPicture(va_dpy, decoderData->context_id, dstSurfData->va_surf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }
    status = vaRenderPicture(va_dpy, decoderData->context_id, &pic_param_buf, 1);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }
    status = vaRenderPicture(va_dpy, decoderData->context_id, &iq_matrix_buf, 1);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    vaDestroyBuffer(va_dpy, pic_param_buf);
    vaDestroyBuffer(va_dpy, iq_matrix_buf);
    glx_context_unlock();

    // merge bitstream buffers
    int total_bitstream_bytes = 0;
    for (unsigned int k = 0; k < bitstream_buffer_count; k ++)
        total_bitstream_bytes += bitstream_buffers[k].bitstream_bytes;

    merged_bitstream = malloc(total_bitstream_bytes);
    if (NULL == merged_bitstream) {
        err_code = VDP_STATUS_RESOURCES;
        goto quit;
    }

    do {
        unsigned char *ptr = merged_bitstream;
        for (unsigned int k = 0; k < bitstream_buffer_count; k ++) {
            memcpy(ptr, bitstream_buffers[k].bitstream, bitstream_buffers[k].bitstream_bytes);
            ptr += bitstream_buffers[k].bitstream_bytes;
        }
    } while(0);

    // Slice parameters

    // All slice data has been merged into one contiguous buffer, but slices must be
    // supplied to the hardware decoder one by one, so we need to delimit them. VDPAU
    // requires bitstream buffers to include the slice start code (0x00 0x00 0x01);
    // the code below uses those start codes to calculate each slice's offset and
    // size (a freestanding sketch of this start-code scan follows this example).

    rbsp_state_t st_g;      // reference, global state
    rbsp_attach_buffer(&st_g, merged_bitstream, total_bitstream_bytes);
    int nal_offset = rbsp_navigate_to_nal_unit(&st_g);
    if (nal_offset < 0) {
        traceError("error (softVdpDecoderRender): no NAL header\n");
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    do {
        VASliceParameterBufferH264 sp_h264;
        memset(&sp_h264, 0, sizeof(VASliceParameterBufferH264));

        // make a copy of global rbsp state for using in slice header parser
        rbsp_state_t st = rbsp_copy_state(&st_g);
        rbsp_reset_bit_counter(&st);
        int nal_offset_next = rbsp_navigate_to_nal_unit(&st_g);

        // Calculate the end of the current slice; the (-3) accounts for the length of the slice start code.
        const unsigned int end_pos = (nal_offset_next > 0) ? (nal_offset_next - 3)
                                                           : total_bitstream_bytes;
        sp_h264.slice_data_size     = end_pos - nal_offset;
        sp_h264.slice_data_offset   = 0;
        sp_h264.slice_data_flag     = VA_SLICE_DATA_FLAG_ALL;

        // TODO: this may not be entirely true for YUV444,
        // but as long as we limit ourselves to YUV420, it's OK
        int ChromaArrayType = pic_param.seq_fields.bits.chroma_format_idc;

        // parse slice header and use its data to fill slice parameter buffer
        parse_slice_header(&st, &pic_param, ChromaArrayType, vdppi->num_ref_idx_l0_active_minus1,
                           vdppi->num_ref_idx_l1_active_minus1, &sp_h264);

        VABufferID slice_parameters_buf;
        glx_context_lock();
        status = vaCreateBuffer(va_dpy, decoderData->context_id, VASliceParameterBufferType,
            sizeof(VASliceParameterBufferH264), 1, &sp_h264, &slice_parameters_buf);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }
        status = vaRenderPicture(va_dpy, decoderData->context_id, &slice_parameters_buf, 1);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }

        VABufferID slice_buf;
        status = vaCreateBuffer(va_dpy, decoderData->context_id, VASliceDataBufferType,
            sp_h264.slice_data_size, 1, merged_bitstream + nal_offset, &slice_buf);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }

        status = vaRenderPicture(va_dpy, decoderData->context_id, &slice_buf, 1);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }

        vaDestroyBuffer(va_dpy, slice_parameters_buf);
        vaDestroyBuffer(va_dpy, slice_buf);
        glx_context_unlock();

        if (nal_offset_next < 0)        // nal_offset_next is -1 when no further slice
            break;                      // start code is found, i.e. that was the final slice
        nal_offset = nal_offset_next;
    } while (1);

    glx_context_lock();
    status = vaEndPicture(va_dpy, decoderData->context_id);
    glx_context_unlock();
    if (VA_STATUS_SUCCESS != status) {
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    err_code = VDP_STATUS_OK;
quit:
    /* free(NULL) is a no-op, so this is safe on the early error paths too */
    free(merged_bitstream);
    return err_code;
}
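The slice-delimiting logic above depends on rbsp_navigate_to_nal_unit() from the host project. A freestanding sketch of the same idea, scanning by hand for Annex-B start codes (0x00 0x00 0x01) and returning the offset just past the next one, or -1 when none remains:

    /* Hedged sketch: find the next Annex-B start code in buf[pos..size), returning
     * the offset of the first byte after it, or -1 when no start code remains. */
    static int next_nal_offset(const uint8_t *buf, int pos, int size)
    {
        for (; pos + 2 < size; pos++) {
            if (buf[pos] == 0x00 && buf[pos + 1] == 0x00 && buf[pos + 2] == 0x01)
                return pos + 3;
        }
        return -1;
    }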
Example #24
qint64 VAApiWriter::write( const QByteArray &data )
{
	VideoFrame *videoFrame = ( VideoFrame * )data.data();
	const VASurfaceID curr_id = ( unsigned long )videoFrame->data[ 3 ];
	const int field = FFCommon::getField( videoFrame, deinterlace, 0, VA_TOP_FIELD, VA_BOTTOM_FIELD );
#ifdef HAVE_VPP
	if ( use_vpp )
	{
		const bool do_vpp_deint = field != 0 && vpp_deint != VA_INVALID_ID;
		bool vpp_ok = false;

		if ( !do_vpp_deint )
		{
			forward_reference = VA_INVALID_SURFACE;
			vpp_second = false;
		}

		if ( do_vpp_deint && forward_reference == VA_INVALID_SURFACE )
			forward_reference = curr_id;
		if ( !vpp_second && forward_reference == curr_id )
			return data.size();

		if ( do_vpp_deint && !vpp_second )
		{
			VAProcFilterParameterBufferDeinterlacing *deint_params = NULL;
			if ( vaMapBuffer( VADisp, vpp_deint, ( void ** )&deint_params ) == VA_STATUS_SUCCESS )
			{
				deint_params->flags = field == VA_TOP_FIELD ? VPP_TFF : VPP_BFF;
				vaUnmapBuffer( VADisp, vpp_deint );
			}
		}

		VABufferID pipeline_buf;
		if ( vaCreateBuffer( VADisp, context_vpp, VAProcPipelineParameterBufferType, sizeof( VAProcPipelineParameterBuffer ), 1, NULL, &pipeline_buf ) == VA_STATUS_SUCCESS )
		{
			VAProcPipelineParameterBuffer *pipeline_param = NULL;
			if ( vaMapBuffer( VADisp, pipeline_buf, ( void ** )&pipeline_param ) == VA_STATUS_SUCCESS )
			{
				memset( pipeline_param, 0, sizeof *pipeline_param );
				pipeline_param->surface = curr_id;
				pipeline_param->output_background_color = 0xFF000000;
				if ( do_vpp_deint )
				{
					pipeline_param->num_filters = 1;
					pipeline_param->filters = &vpp_deint;
					pipeline_param->num_forward_references = 1;
					pipeline_param->forward_references = &forward_reference;
				}
				vaUnmapBuffer( VADisp, pipeline_buf );
				if ( vaBeginPicture( VADisp, context_vpp, id_vpp ) == VA_STATUS_SUCCESS )
				{
					vpp_ok = vaRenderPicture( VADisp, context_vpp, &pipeline_buf, 1 ) == VA_STATUS_SUCCESS;
					vaEndPicture( VADisp, context_vpp );
				}
			}
			if ( !vpp_ok )
				vaDestroyBuffer( VADisp, pipeline_buf );
		}

		if ( vpp_second )
			forward_reference = curr_id;
		if ( do_vpp_deint )
			vpp_second = !vpp_second;

		if ( ( ok = vpp_ok ) )
			draw( id_vpp, do_vpp_deint ? 0 : field );
	}
	else
#endif
		draw( curr_id, field );
	paused = false;
	return data.size();
}
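The example maps vpp_deint without showing its creation. A minimal sketch of how such a deinterlacing filter buffer is typically set up, querying the driver's capabilities first; the vpp_deint, VADisp, and context_vpp names follow the example, everything else is an assumption:

	/* Hedged sketch: query deinterlacing capabilities and create the vpp_deint
	 * filter buffer that the write() path above maps and updates per field. */
	VAProcFilterCapDeinterlacing caps[ VAProcDeinterlacingCount ];
	unsigned int num_caps = VAProcDeinterlacingCount;
	if ( vaQueryVideoProcFilterCaps( VADisp, context_vpp, VAProcFilterDeinterlacing, caps, &num_caps ) == VA_STATUS_SUCCESS && num_caps > 0 )
	{
		VAProcFilterParameterBufferDeinterlacing deint = { 0 };
		deint.type      = VAProcFilterDeinterlacing;
		deint.algorithm = caps[ 0 ].type; /* first advertised algorithm */
		vaCreateBuffer( VADisp, context_vpp, VAProcFilterParameterBufferType, sizeof( deint ), 1, &deint, &vpp_deint );
	}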