mp_image *render(const VideoFrame &frame, int flags) {
    if (m_rebuild)
        update();
    auto in = VaApiSurfacePool::getSurface(frame.mpi());
    if (!in)
        return nullptr;
    m_pool.create(5, frame.width(), frame.height(), in->format());
    auto ret = m_pool.getMpImage();
    if (!ret)
        return nullptr;
    auto out = VaApiSurfacePool::getSurface(ret);
    if (!out)
        return nullptr;
    VAProcPipelineParameterBuffer *param = nullptr;
    VABufferID buffer = BufferMan::create(m_context, VAProcPipelineParameterBufferType, param, 1);
    if (buffer == VA_INVALID_ID)
        return nullptr;
    enum { Begun = 1, Rendered = 2 };
    int state = 0;
    auto pass = [this, out, &ret, &buffer, &state, &frame] () -> mp_image* {
        if (state & Begun)
            vaEndPicture(VaApi::glx(), m_context);
        if (state & Rendered) {
            mp_image_copy_attributes(ret, frame.mpi());
        } else
            mp_image_unrefp(&ret);
        vaDestroyBuffer(VaApi::glx(), buffer);
        vaSyncSurface(VaApi::glx(), out->id());
        return ret;
    };
    if (!isSuccess(vaBeginPicture(VaApi::glx(), m_context, out->id())))
        return pass();
    state |= Begun;
    if (!BufferMan::map(buffer, param))
        return pass();
    memset(param, 0, sizeof(*param));
    param->surface = in->id();
    param->filter_flags = flags;
    param->filters = &m_buffers.first();
    param->num_filters = m_buffers.size();
    param->forward_references = m_forward_refs.data();
    param->backward_references = m_backward_refs.data();
    param->num_forward_references = m_caps.num_forward_references;
    param->num_backward_references = m_caps.num_backward_references;
    BufferMan::unmap(buffer);
    if (!isSuccess(vaRenderPicture(VaApi::glx(), m_context, &buffer, 1)))
        return pass();
    state |= Rendered;
    return pass();
}
int ff_vaapi_render_picture(FFVAContext *vactx, VASurfaceID surface)
{
    VABufferID va_buffers[3];
    unsigned int n_va_buffers = 0;

    if (vactx->pic_param_buf_id == VA_INVALID_ID)
        return 0;

    vaUnmapBuffer(vactx->display, vactx->pic_param_buf_id);
    va_buffers[n_va_buffers++] = vactx->pic_param_buf_id;

    if (vactx->iq_matrix_buf_id != VA_INVALID_ID) {
        vaUnmapBuffer(vactx->display, vactx->iq_matrix_buf_id);
        va_buffers[n_va_buffers++] = vactx->iq_matrix_buf_id;
    }

    if (vactx->bitplane_buf_id != VA_INVALID_ID) {
        vaUnmapBuffer(vactx->display, vactx->bitplane_buf_id);
        va_buffers[n_va_buffers++] = vactx->bitplane_buf_id;
    }

    if (vaBeginPicture(vactx->display, vactx->context_id,
                       surface) != VA_STATUS_SUCCESS)
        return -1;

    if (vaRenderPicture(vactx->display, vactx->context_id,
                        va_buffers, n_va_buffers) != VA_STATUS_SUCCESS)
        return -1;

    if (vaRenderPicture(vactx->display, vactx->context_id,
                        vactx->slice_buf_ids,
                        vactx->n_slice_buf_ids) != VA_STATUS_SUCCESS)
        return -1;

    if (vaEndPicture(vactx->display, vactx->context_id) != VA_STATUS_SUCCESS)
        return -1;

    return 0;
}
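Every snippet in this section follows the same three-call submission contract: vaBeginPicture() binds a target surface to the context, one or more vaRenderPicture() calls attach parameter/data buffers, and vaEndPicture() commits the frame for (possibly asynchronous) processing. A minimal sketch of that contract, assuming an already-configured display, context, and buffer list (the function name and error convention here are illustrative, not from any of the quoted projects):

#include <va/va.h>

static int submit_frame(VADisplay dpy, VAContextID ctx, VASurfaceID target,
                        VABufferID *buffers, unsigned int n_buffers)
{
    /* Bind the destination surface for this frame. */
    if (vaBeginPicture(dpy, ctx, target) != VA_STATUS_SUCCESS)
        return -1;
    /* Attach parameter and data buffers; may be called repeatedly. */
    if (vaRenderPicture(dpy, ctx, buffers, n_buffers) != VA_STATUS_SUCCESS) {
        /* Still end the picture so the context is not left mid-frame. */
        vaEndPicture(dpy, ctx);
        return -1;
    }
    /* Commit the frame; the driver may process it asynchronously. */
    if (vaEndPicture(dpy, ctx) != VA_STATUS_SUCCESS)
        return -1;
    /* Optionally block until the surface is safe to read back. */
    return vaSyncSurface(dpy, target) == VA_STATUS_SUCCESS ? 0 : -1;
}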
static VdpStatus softVdpDecoderRender_h264(VdpDecoder decoder, VdpDecoderData *decoderData,
                                           VdpVideoSurfaceData *dstSurfData,
                                           VdpPictureInfo const *picture_info,
                                           uint32_t bitstream_buffer_count,
                                           VdpBitstreamBuffer const *bitstream_buffers)
{
    VdpDeviceData *deviceData = decoderData->device;
    VADisplay va_dpy = deviceData->va_dpy;
    VAStatus status;
    VdpStatus vs, err_code;
    VdpPictureInfoH264 const *vdppi = (void *)picture_info;

    // TODO: figure out where to get level
    uint32_t level = 41;

    // preparing picture parameters and IQ matrix
    VABufferID pic_param_buf, iq_matrix_buf;
    VAPictureParameterBufferH264 pic_param;
    VAIQMatrixBufferH264 iq_matrix;

    vs = h264_translate_reference_frames(dstSurfData, decoder, decoderData,
                                         &pic_param, vdppi);
    if (VDP_STATUS_OK != vs) {
        if (VDP_STATUS_RESOURCES == vs) {
            traceError("error (softVdpDecoderRender): no surfaces left in buffer\n");
            err_code = VDP_STATUS_RESOURCES;
        } else {
            err_code = VDP_STATUS_ERROR;
        }
        goto quit;
    }

    h264_translate_pic_param(&pic_param, decoderData->width, decoderData->height,
                             vdppi, level);
    h264_translate_iq_matrix(&iq_matrix, vdppi);

    glx_context_lock();
    status = vaCreateBuffer(va_dpy, decoderData->context_id,
                            VAPictureParameterBufferType,
                            sizeof(VAPictureParameterBufferH264), 1,
                            &pic_param, &pic_param_buf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    status = vaCreateBuffer(va_dpy, decoderData->context_id,
                            VAIQMatrixBufferType,
                            sizeof(VAIQMatrixBufferH264), 1,
                            &iq_matrix, &iq_matrix_buf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    // send data to decoding hardware
    status = vaBeginPicture(va_dpy, decoderData->context_id, dstSurfData->va_surf);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }
    status = vaRenderPicture(va_dpy, decoderData->context_id, &pic_param_buf, 1);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }
    status = vaRenderPicture(va_dpy, decoderData->context_id, &iq_matrix_buf, 1);
    if (VA_STATUS_SUCCESS != status) {
        glx_context_unlock();
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    vaDestroyBuffer(va_dpy, pic_param_buf);
    vaDestroyBuffer(va_dpy, iq_matrix_buf);
    glx_context_unlock();

    // merge bitstream buffers
    int total_bitstream_bytes = 0;
    for (unsigned int k = 0; k < bitstream_buffer_count; k++)
        total_bitstream_bytes += bitstream_buffers[k].bitstream_bytes;

    uint8_t *merged_bitstream = malloc(total_bitstream_bytes);
    if (NULL == merged_bitstream) {
        err_code = VDP_STATUS_RESOURCES;
        goto quit;
    }

    do {
        unsigned char *ptr = merged_bitstream;
        for (unsigned int k = 0; k < bitstream_buffer_count; k++) {
            memcpy(ptr, bitstream_buffers[k].bitstream, bitstream_buffers[k].bitstream_bytes);
            ptr += bitstream_buffers[k].bitstream_bytes;
        }
    } while (0);

    // Slice parameters
    // All slice data have been merged into one continuous buffer. But we must supply
    // slices one by one to the hardware decoder, so we need to delimit them. VDPAU
    // requires bitstream buffers to include slice start code (0x00 0x00 0x01). Those
    // will be used to calculate offsets and sizes of slice data in code below.
    rbsp_state_t st_g;      // reference, global state
    rbsp_attach_buffer(&st_g, merged_bitstream, total_bitstream_bytes);
    int nal_offset = rbsp_navigate_to_nal_unit(&st_g);
    if (nal_offset < 0) {
        traceError("error (softVdpDecoderRender): no NAL header\n");
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    do {
        VASliceParameterBufferH264 sp_h264;
        memset(&sp_h264, 0, sizeof(VASliceParameterBufferH264));

        // make a copy of global rbsp state for using in slice header parser
        rbsp_state_t st = rbsp_copy_state(&st_g);
        rbsp_reset_bit_counter(&st);
        int nal_offset_next = rbsp_navigate_to_nal_unit(&st_g);

        // calculate end of current slice. Note (-3). It's slice start code length.
        const unsigned int end_pos = (nal_offset_next > 0) ? (nal_offset_next - 3)
                                                           : total_bitstream_bytes;

        sp_h264.slice_data_size   = end_pos - nal_offset;
        sp_h264.slice_data_offset = 0;
        sp_h264.slice_data_flag   = VA_SLICE_DATA_FLAG_ALL;

        // TODO: this may be not entirely true for YUV444,
        // but if we're limiting to YUV420, that's ok
        int ChromaArrayType = pic_param.seq_fields.bits.chroma_format_idc;

        // parse slice header and use its data to fill slice parameter buffer
        parse_slice_header(&st, &pic_param, ChromaArrayType,
                           vdppi->num_ref_idx_l0_active_minus1,
                           vdppi->num_ref_idx_l1_active_minus1, &sp_h264);

        VABufferID slice_parameters_buf;
        glx_context_lock();
        status = vaCreateBuffer(va_dpy, decoderData->context_id,
                                VASliceParameterBufferType,
                                sizeof(VASliceParameterBufferH264), 1,
                                &sp_h264, &slice_parameters_buf);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }
        status = vaRenderPicture(va_dpy, decoderData->context_id,
                                 &slice_parameters_buf, 1);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }

        VABufferID slice_buf;
        status = vaCreateBuffer(va_dpy, decoderData->context_id,
                                VASliceDataBufferType, sp_h264.slice_data_size, 1,
                                merged_bitstream + nal_offset, &slice_buf);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }
        status = vaRenderPicture(va_dpy, decoderData->context_id, &slice_buf, 1);
        if (VA_STATUS_SUCCESS != status) {
            glx_context_unlock();
            err_code = VDP_STATUS_ERROR;
            goto quit;
        }

        vaDestroyBuffer(va_dpy, slice_parameters_buf);
        vaDestroyBuffer(va_dpy, slice_buf);
        glx_context_unlock();

        if (nal_offset_next < 0)    // nal_offset_next equals -1 when there is no slice
            break;                  // start code found. Thus that was the final slice.
        nal_offset = nal_offset_next;
    } while (1);

    glx_context_lock();
    status = vaEndPicture(va_dpy, decoderData->context_id);
    glx_context_unlock();
    if (VA_STATUS_SUCCESS != status) {
        err_code = VDP_STATUS_ERROR;
        goto quit;
    }

    free(merged_bitstream);
    err_code = VDP_STATUS_OK;

quit:
    return err_code;
}
Decode_Status VideoDecoderVP8::decodePicture(vbp_data_vp8 *data, int32_t picIndex) {
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    Decode_Status status;
    uint32_t bufferIDCount = 0;
    VABufferID bufferIDs[5];

    vbp_picture_data_vp8 *picData = &(data->pic_data[picIndex]);
    VAPictureParameterBufferVP8 *picParams = picData->pic_parms;

    status = setReference(picParams);
    CHECK_STATUS("setReference");

    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    CHECK_VA_STATUS("vaBeginPicture");
    // setting mDecodingFrame to true so vaEndPicture will be invoked to end the picture decoding.
    mDecodingFrame = true;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferVP8),
            1,
            picParams,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAProbabilityBufferType,
            sizeof(VAProbabilityDataBufferVP8),
            1,
            data->prob_data,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateProbabilityBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferVP8),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
    bufferIDCount++;

    /* Here picData->num_slices is always equal to 1 */
    for (uint32_t i = 0; i < picData->num_slices; i++) {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceParameterBufferType,
                sizeof(VASliceParameterBufferVP8),
                1,
                &(picData->slc_data[i].slc_parms),
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceDataBufferType,
                picData->slc_data[i].slice_size, //size
                1,                               //num_elements
                picData->slc_data[i].buffer_addr + picData->slc_data[i].slice_offset,
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateSliceDataBuffer");
        bufferIDCount++;
    }

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            bufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    vaStatus = vaEndPicture(mVADisplay, mVAContext);
    mDecodingFrame = false;
    CHECK_VA_STATUS("vaEndPicture");

    return DECODE_SUCCESS;
}
Decode_Status VideoDecoderAVCSecure::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the first slice lost
        }
        if (mDecodingFrame) {
            // interlace content, complete decoding the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");
            // for interlace content, top field may be valid only after the second field is parsed
            mAcquiredBuffer->pictureOrder = picParam->CurrPic.TopFieldOrderCnt;
        }

        // Check there is no reference frame loss before decoding a frame
        // Update the reference frames and surface IDs for DPB and current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

        // We have to provide a hacked DPB rather than complete DPB for libva as workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");

        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VAPictureParameterBufferType,
                sizeof(VAPictureParameterBufferH264),
                1,
                picParam,
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VAIQMatrixBufferType,
                sizeof(VAIQMatrixBufferH264),
                1,
                data->IQ_matrix_buf,
                &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    // find which naluinfo is correlated to current slice
    int naluIndex = 0;
    uint32_t accumulatedHeaderLen = 0;
    uint32_t headerLen = 0;
    for (; naluIndex < mMetadata.naluNumber; naluIndex++) {
        headerLen = mMetadata.naluInfo[naluIndex].naluHeaderLen;
        if (headerLen == 0) {
            WTRACE("length of current NAL unit is 0.");
            continue;
        }
        accumulatedHeaderLen += STARTCODE_PREFIX_LEN;
        if (accumulatedHeaderLen + headerLen > sliceData->slice_offset) {
            break;
        }
        accumulatedHeaderLen += headerLen;
    }

    if (sliceData->slice_offset != accumulatedHeaderLen) {
        WTRACE("unexpected slice offset %d, accumulatedHeaderLen = %d",
               sliceData->slice_offset, accumulatedHeaderLen);
    }

    sliceParam->slice_data_size = mMetadata.naluInfo[naluIndex].naluLen;
    uint32_t sliceOffset = mMetadata.naluInfo[naluIndex].naluOffset;
    uint32_t slice_offset_shift = sliceOffset % 16;
    sliceParam->slice_data_offset += slice_offset_shift;
    sliceData->slice_size = (sliceParam->slice_data_size + slice_offset_shift + 0xF) & ~0xF;

    vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VASliceParameterBufferType,
            sizeof(VASliceParameterBufferH264),
            1,
            sliceParam,
            &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    // sliceData->slice_offset - accumulatedHeaderLen is the absolute offset to start codes of current NAL unit
    // offset points to first byte of NAL unit
    if (mInputBuffer != NULL) {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VASliceDataBufferType,
                sliceData->slice_size,  //Slice size
                1,                      // num_elements
                mInputBuffer + sliceOffset - slice_offset_shift,
                &bufferIDs[bufferIDCount]);
    } else {
        vaStatus = vaCreateBuffer(
                mVADisplay,
                mVAContext,
                VAProtectedSliceDataBufferType,
                sliceData->slice_size,  //size
                1,                      //num_elements
                (uint8_t*)sliceOffset,  // IMR offset
                &bufferIDs[bufferIDCount]);
    }
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
            mVADisplay,
            mVAContext,
            bufferIDs,
            bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}
bool VaapiPicture::decodePicture()
{
    VAStatus status;
    uint32_t i;
    vector<VaapiSlice *>::iterator iter;

    DEBUG("VaapiPicture::decodePicture 0x%08x", m_surfaceID);

    status = vaBeginPicture(m_display, m_context, m_surfaceID);
    if (!checkVaapiStatus(status, "vaBeginPicture()"))
        return false;

    if (m_picParam) {
        if (!renderVaBuffer(m_picParam, "vaRenderPicture(), render pic param"))
            return false;
    }
    if (m_probTable) {
        if (!renderVaBuffer(m_probTable, "vaRenderPicture(), render probability table"))
            return false;
    }
    if (m_iqMatrix) {
        if (!renderVaBuffer(m_iqMatrix, "vaRenderPicture(), render IQ matrix"))
            return false;
    }
    if (m_bitPlane) {
        if (!renderVaBuffer(m_bitPlane, "vaRenderPicture(), render bit plane"))
            return false;
    }
    if (m_hufTable) {
        if (!renderVaBuffer(m_hufTable, "vaRenderPicture(), render huffman table"))
            return false;
    }

    for (iter = m_sliceArray.begin(); iter != m_sliceArray.end(); ++iter) {
        VaapiBufObject *paramBuf = (*iter)->m_param;
        VaapiBufObject *dataBuf = (*iter)->m_data;

        if (!renderVaBuffer(paramBuf, "vaRenderPicture(), render slice param"))
            break;
        if (!renderVaBuffer(dataBuf, "vaRenderPicture(), render slice data"))
            break;
    }

    if (iter != m_sliceArray.end()) {
        m_sliceArray.clear();
        return false;
    }
    m_sliceArray.clear();

    status = vaEndPicture(m_display, m_context);
    if (!checkVaapiStatus(status, "vaEndPicture()"))
        return false;

    return true;
}
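The renderVaBuffer() helper used above is not shown; presumably it submits a single buffer ID and logs on failure. A hypothetical stand-in written as a free function (the name, signature, and logging here are assumptions, not the project's actual code):

#include <stdio.h>
#include <va/va.h>

/* Hypothetical equivalent of the renderVaBuffer() helper above: submit one
 * buffer via vaRenderPicture() and report failure with a caller-supplied
 * description. Returns 1 on success, 0 on failure. */
static int render_va_buffer(VADisplay dpy, VAContextID ctx,
                            VABufferID buf, const char *desc)
{
    VAStatus status = vaRenderPicture(dpy, ctx, &buf, 1);
    if (status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "%s failed: %s\n", desc, vaErrorStr(status));
        return 0;
    }
    return 1;
}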
static int scale_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    ScaleVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VAProcPipelineParameterBuffer params;
    VABufferID params_id;
    VARectangle input_region;
    VAStatus vas;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
           input_surface);

    output_frame = av_frame_alloc();
    if (!output_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_hwframe_get_buffer(ctx->output_frames_ref, output_frame, 0);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get surface for output: %d.\n",
               err);
        goto fail;
    }

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
           output_surface);

    memset(&params, 0, sizeof(params));

    // If there were top/left cropping, it could be taken into
    // account here.
    input_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = input_frame->width,
        .height = input_frame->height,
    };

    params.surface = input_surface;
    params.surface_region = &input_region;
    params.surface_color_standard =
        vaapi_proc_colour_standard(input_frame->colorspace);

    params.output_region = 0;
    params.output_background_color = 0xff000000;
    params.output_color_standard = params.surface_color_standard;

    params.pipeline_flags = 0;
    params.filter_flags = VA_FILTER_SCALING_HQ;

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(params), 1, &params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(ctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    av_frame_copy_props(output_frame, input_frame);
    av_frame_free(&input_frame);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output_frame->format),
           output_frame->width, output_frame->height, output_frame->pts);

    return ff_filter_frame(outlink, output_frame);

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture. These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_frame_free(&input_frame);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int scale_vaapi_init(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    if (ctx->output_format_string) {
        ctx->output_format = av_get_pix_fmt(ctx->output_format_string);
        if (ctx->output_format == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Invalid output format.\n");
            return AVERROR(EINVAL);
        }
    } else {
        // Use the input format once that is configured.
        ctx->output_format = AV_PIX_FMT_NONE;
    }

    return 0;
}
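The conditional vaDestroyBuffer() in the filter above (and the same check in the deinterlace filter later in this section) encodes a buffer-ownership rule: drivers without the quirk consume and free parameter buffers inside vaRenderPicture(), while drivers flagged with AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS leave ownership with the caller, who must destroy the buffer after the picture is ended. A condensed sketch of that rule, with an illustrative helper name (not FFmpeg's actual API):

#include <va/va.h>
#include <libavutil/hwcontext_vaapi.h>

/* Destroy a rendered parameter buffer only when the driver is known not
 * to have consumed it inside vaRenderPicture(). */
static void maybe_destroy_param_buffer(AVVAAPIDeviceContext *hwctx,
                                       VABufferID params_id)
{
    if (hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
        vaDestroyBuffer(hwctx->display, params_id);
}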
static int vaapi_encode_issue(AVCodecContext *avctx,
                              VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeSlice *slice;
    VAStatus vas;
    int err, i;
    char data[MAX_PARAM_BUFFER_SIZE];
    size_t bit_len;

    av_log(avctx, AV_LOG_DEBUG, "Issuing encode for pic %"PRId64"/%"PRId64" "
           "as type %s.\n", pic->display_order, pic->encode_order,
           picture_type_name[pic->type]);
    if (pic->nb_refs == 0) {
        av_log(avctx, AV_LOG_DEBUG, "No reference pictures.\n");
    } else {
        av_log(avctx, AV_LOG_DEBUG, "Refers to:");
        for (i = 0; i < pic->nb_refs; i++) {
            av_log(avctx, AV_LOG_DEBUG, " %"PRId64"/%"PRId64,
                   pic->refs[i]->display_order, pic->refs[i]->encode_order);
        }
        av_log(avctx, AV_LOG_DEBUG, ".\n");
    }

    av_assert0(pic->input_available && !pic->encode_issued);
    for (i = 0; i < pic->nb_refs; i++) {
        av_assert0(pic->refs[i]);
        // If we are serialised then the references must have already
        // completed. If not, they must have been issued but need not
        // have completed yet.
        if (ctx->issue_mode == ISSUE_MODE_SERIALISE_EVERYTHING)
            av_assert0(pic->refs[i]->encode_complete);
        else
            av_assert0(pic->refs[i]->encode_issued);
    }

    av_log(avctx, AV_LOG_DEBUG, "Input surface is %#x.\n", pic->input_surface);

    pic->recon_image = av_frame_alloc();
    if (!pic->recon_image) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_hwframe_get_buffer(ctx->recon_frames_ref, pic->recon_image, 0);
    if (err < 0) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    pic->recon_surface = (VASurfaceID)(uintptr_t)pic->recon_image->data[3];
    av_log(avctx, AV_LOG_DEBUG, "Recon surface is %#x.\n", pic->recon_surface);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAEncCodedBufferType,
                         MAX_OUTPUT_BUFFER_SIZE, 1, 0,
                         &pic->output_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create bitstream "
               "output buffer: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(ENOMEM);
        goto fail;
    }
    av_log(avctx, AV_LOG_DEBUG, "Output buffer is %#x.\n",
           pic->output_buffer);

    if (ctx->codec->picture_params_size > 0) {
        pic->codec_picture_params = av_malloc(ctx->codec->picture_params_size);
        if (!pic->codec_picture_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        memcpy(pic->codec_picture_params, ctx->codec_picture_params,
               ctx->codec->picture_params_size);
    } else {
        av_assert0(!ctx->codec_picture_params);
    }

    pic->nb_param_buffers = 0;

    if (pic->encode_order == 0) {
        // Global parameter buffers are set on the first picture only.
        for (i = 0; i < ctx->nb_global_params; i++) {
            err = vaapi_encode_make_param_buffer(avctx, pic,
                                                 VAEncMiscParameterBufferType,
                                                 (char*)ctx->global_params[i],
                                                 ctx->global_params_size[i]);
            if (err < 0)
                goto fail;
        }
    }

    if (pic->type == PICTURE_TYPE_IDR && ctx->codec->init_sequence_params) {
        err = vaapi_encode_make_param_buffer(avctx, pic,
                                             VAEncSequenceParameterBufferType,
                                             ctx->codec_sequence_params,
                                             ctx->codec->sequence_params_size);
        if (err < 0)
            goto fail;
    }

    if (ctx->codec->init_picture_params) {
        err = ctx->codec->init_picture_params(avctx, pic);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to initialise picture "
                   "parameters: %d.\n", err);
            goto fail;
        }
        err = vaapi_encode_make_param_buffer(avctx, pic,
                                             VAEncPictureParameterBufferType,
                                             pic->codec_picture_params,
                                             ctx->codec->picture_params_size);
        if (err < 0)
            goto fail;
    }

    if (pic->type == PICTURE_TYPE_IDR) {
        if (ctx->codec->write_sequence_header) {
            bit_len = 8 * sizeof(data);
            err = ctx->codec->write_sequence_header(avctx, data, &bit_len);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write per-sequence "
                       "header: %d.\n", err);
                goto fail;
            }
            err = vaapi_encode_make_packed_header(avctx, pic,
                                                  ctx->codec->sequence_header_type,
                                                  data, bit_len);
            if (err < 0)
                goto fail;
        }
    }

    if (ctx->codec->write_picture_header) {
        bit_len = 8 * sizeof(data);
        err = ctx->codec->write_picture_header(avctx, pic, data, &bit_len);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to write per-picture "
                   "header: %d.\n", err);
            goto fail;
        }
        err = vaapi_encode_make_packed_header(avctx, pic,
                                              ctx->codec->picture_header_type,
                                              data, bit_len);
        if (err < 0)
            goto fail;
    }

    if (ctx->codec->write_extra_buffer) {
        for (i = 0;; i++) {
            size_t len = sizeof(data);
            int type;
            err = ctx->codec->write_extra_buffer(avctx, pic, i, &type,
                                                 data, &len);
            if (err == AVERROR_EOF)
                break;
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write extra "
                       "buffer %d: %d.\n", i, err);
                goto fail;
            }
            err = vaapi_encode_make_param_buffer(avctx, pic, type,
                                                 data, len);
            if (err < 0)
                goto fail;
        }
    }

    av_assert0(pic->nb_slices <= MAX_PICTURE_SLICES);
    for (i = 0; i < pic->nb_slices; i++) {
        slice = av_mallocz(sizeof(*slice));
        if (!slice) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        pic->slices[i] = slice;

        if (ctx->codec->slice_params_size > 0) {
            slice->codec_slice_params = av_mallocz(ctx->codec->slice_params_size);
            if (!slice->codec_slice_params) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }

        if (ctx->codec->init_slice_params) {
            err = ctx->codec->init_slice_params(avctx, pic, slice);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise slice "
                       "parameters: %d.\n", err);
                goto fail;
            }
        }

        if (ctx->codec->write_slice_header) {
            bit_len = 8 * sizeof(data);
            err = ctx->codec->write_slice_header(avctx, pic, slice,
                                                 data, &bit_len);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to write per-slice "
                       "header: %d.\n", err);
                goto fail;
            }
            err = vaapi_encode_make_packed_header(avctx, pic,
                                                  ctx->codec->slice_header_type,
                                                  data, bit_len);
            if (err < 0)
                goto fail;
        }

        if (ctx->codec->init_slice_params) {
            err = vaapi_encode_make_param_buffer(avctx, pic,
                                                 VAEncSliceParameterBufferType,
                                                 slice->codec_slice_params,
                                                 ctx->codec->slice_params_size);
            if (err < 0)
                goto fail;
        }
    }

    vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
                         pic->input_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to begin picture encode issue: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          pic->param_buffers, pic->nb_param_buffers);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to upload encode parameters: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to end picture encode issue: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_at_end;
    }

    pic->encode_issued = 1;

    if (ctx->issue_mode == ISSUE_MODE_SERIALISE_EVERYTHING)
        return vaapi_encode_wait(avctx, pic);
    else
        return 0;

fail_with_picture:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    for (i = 0; i < pic->nb_param_buffers; i++)
        vaDestroyBuffer(ctx->hwctx->display, pic->param_buffers[i]);
fail_at_end:
    av_freep(&pic->codec_picture_params);
    av_frame_free(&pic->recon_image);
    return err;
}
qint64 VAApiWriter::write( const QByteArray &data )
{
    VideoFrame *videoFrame = ( VideoFrame * )data.data();
    const VASurfaceID curr_id = ( unsigned long )videoFrame->data[ 3 ];
    const int field = FFCommon::getField( videoFrame, deinterlace, 0, VA_TOP_FIELD, VA_BOTTOM_FIELD );
#ifdef HAVE_VPP
    if ( use_vpp )
    {
        const bool do_vpp_deint = field != 0 && vpp_deint != VA_INVALID_ID;
        bool vpp_ok = false;

        if ( !do_vpp_deint )
        {
            forward_reference = VA_INVALID_SURFACE;
            vpp_second = false;
        }

        if ( do_vpp_deint && forward_reference == VA_INVALID_SURFACE )
            forward_reference = curr_id;
        if ( !vpp_second && forward_reference == curr_id )
            return data.size();

        if ( do_vpp_deint && !vpp_second )
        {
            VAProcFilterParameterBufferDeinterlacing *deint_params = NULL;
            if ( vaMapBuffer( VADisp, vpp_deint, ( void ** )&deint_params ) == VA_STATUS_SUCCESS )
            {
                deint_params->flags = field == VA_TOP_FIELD ? VPP_TFF : VPP_BFF;
                vaUnmapBuffer( VADisp, vpp_deint );
            }
        }

        VABufferID pipeline_buf;
        if ( vaCreateBuffer( VADisp, context_vpp, VAProcPipelineParameterBufferType, sizeof( VAProcPipelineParameterBuffer ), 1, NULL, &pipeline_buf ) == VA_STATUS_SUCCESS )
        {
            VAProcPipelineParameterBuffer *pipeline_param = NULL;
            if ( vaMapBuffer( VADisp, pipeline_buf, ( void ** )&pipeline_param ) == VA_STATUS_SUCCESS )
            {
                memset( pipeline_param, 0, sizeof *pipeline_param );
                pipeline_param->surface = curr_id;
                pipeline_param->output_background_color = 0xFF000000;
                if ( do_vpp_deint )
                {
                    pipeline_param->num_filters = 1;
                    pipeline_param->filters = &vpp_deint;
                    pipeline_param->num_forward_references = 1;
                    pipeline_param->forward_references = &forward_reference;
                }
                vaUnmapBuffer( VADisp, pipeline_buf );
                if ( vaBeginPicture( VADisp, context_vpp, id_vpp ) == VA_STATUS_SUCCESS )
                {
                    vpp_ok = vaRenderPicture( VADisp, context_vpp, &pipeline_buf, 1 ) == VA_STATUS_SUCCESS;
                    vaEndPicture( VADisp, context_vpp );
                }
            }
            if ( !vpp_ok )
                vaDestroyBuffer( VADisp, pipeline_buf );
        }

        if ( vpp_second )
            forward_reference = curr_id;
        if ( do_vpp_deint )
            vpp_second = !vpp_second;

        if ( ( ok = vpp_ok ) )
            draw( id_vpp, do_vpp_deint ? 0 : field );
    }
    else
#endif
        draw( curr_id, field );
    paused = false;
    return data.size();
}
static void h264_decode_frame(int f_width, int f_height, char *framedata, int framesize, int slice_type)
{
    VAStatus va_status;

    DebugLog(("%s: called for frame of %d bytes (%dx%d) slice_type=%d\n",
              __FUNCTION__, framesize, f_width, f_height, slice_type));

    /* Initialize decode pipeline if necessary */
    if ( (f_width > cur_width) || (f_height > cur_height) ) {
        if (va_dpy != NULL)
            h264_cleanup_decoder();
        cur_width = f_width;
        cur_height = f_height;
        h264_init_decoder(f_width, f_height);
        rfbClientLog("%s: decoder initialized\n", __FUNCTION__);
    }

    /* Decode frame */
    static VAPictureH264 va_picture_h264, va_old_picture_h264;

    /* The server should always send an I-frame when a new client connects
     * or when the resolution of the framebuffer changes, but we check
     * just in case. */
    if ( (slice_type != SLICE_TYPE_I) && (num_frames == 0) ) {
        rfbClientLog("First frame is not an I frame !!! Skipping!!!\n");
        return;
    }

    DebugLog(("%s: frame_id=%d va_surface_id[%d]=0x%x field_order_count=%d\n",
              __FUNCTION__, frame_id, sid, va_surface_id[sid], field_order_count));

    va_picture_h264.picture_id = va_surface_id[sid];
    va_picture_h264.frame_idx  = frame_id;
    va_picture_h264.flags = 0;
    va_picture_h264.BottomFieldOrderCnt = field_order_count;
    va_picture_h264.TopFieldOrderCnt    = field_order_count;

    /* Set up picture parameter buffer */
    if (va_pic_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAPictureParameterBufferType,
                                   sizeof(VAPictureParameterBufferH264), 1, NULL,
                                   &va_pic_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(PicParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAPictureParameterBufferH264 *pic_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_pic_param_buf_id[sid], (void **)&pic_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(PicParam)");

    SetVAPictureParameterBufferH264(pic_param_buf, f_width, f_height);
    memcpy(&pic_param_buf->CurrPic, &va_picture_h264, sizeof(VAPictureH264));

    if (slice_type == SLICE_TYPE_P) {
        memcpy(&pic_param_buf->ReferenceFrames[0], &va_old_picture_h264, sizeof(VAPictureH264));
        pic_param_buf->ReferenceFrames[0].flags = 0;
    }
    else if (slice_type != SLICE_TYPE_I) {
        rfbClientLog("Frame type %d not supported!!!\n", slice_type);
        return;
    }
    pic_param_buf->frame_num = frame_id;

    va_status = vaUnmapBuffer(va_dpy, va_pic_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(PicParam)");

    /* Set up IQ matrix buffer */
    if (va_mat_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAIQMatrixBufferType,
                                   sizeof(VAIQMatrixBufferH264), 1, NULL,
                                   &va_mat_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(IQMatrix)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAIQMatrixBufferH264 *iq_matrix_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_mat_param_buf_id[sid], (void **)&iq_matrix_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(IQMatrix)");

    static const unsigned char m_MatrixBufferH264[] = {
        /* ScalingList4x4[6][16] */
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        /* ScalingList8x8[2][64] */
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00
    };
    memcpy(iq_matrix_buf, m_MatrixBufferH264, 224);

    va_status = vaUnmapBuffer(va_dpy, va_mat_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(IQMatrix)");

    VABufferID buffer_ids[2];
    buffer_ids[0] = va_pic_param_buf_id[sid];
    buffer_ids[1] = va_mat_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    /* Set up slice parameter buffer */
    if (va_sp_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceParameterBufferType,
                                   sizeof(VASliceParameterBufferH264), 1, NULL,
                                   &va_sp_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VASliceParameterBufferH264 *slice_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_sp_param_buf_id[sid], (void **)&slice_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceParam)");

    static int t2_first = 1;
    if (slice_type == SLICE_TYPE_I) {
        SetVASliceParameterBufferH264_Intra(slice_param_buf, t2_first);
        t2_first = 0;
    } else {
        SetVASliceParameterBufferH264(slice_param_buf);
        memcpy(&slice_param_buf->RefPicList0[0], &va_old_picture_h264, sizeof(VAPictureH264));
        slice_param_buf->RefPicList0[0].flags = 0;
    }
    slice_param_buf->slice_data_bit_offset = 0;
    slice_param_buf->slice_data_size = framesize;

    va_status = vaUnmapBuffer(va_dpy, va_sp_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceParam)");
    CHECK_SURF(va_surface_id[sid]);

    /* Set up slice data buffer and copy H.264 encoded data */
    if (va_d_param_buf_id[sid] == VA_INVALID_ID) {
        /* TODO use estimation matching framebuffer dimensions instead of this large value */
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceDataBufferType,
                                   4177920, 1, NULL, &va_d_param_buf_id[sid]); /* 1080p size */
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceData)");
    }

    char *slice_data_buf;
    va_status = vaMapBuffer(va_dpy, va_d_param_buf_id[sid], (void **)&slice_data_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceData)");
    memcpy(slice_data_buf, framedata, framesize);

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaUnmapBuffer(va_dpy, va_d_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceData)");

    buffer_ids[0] = va_sp_param_buf_id[sid];
    buffer_ids[1] = va_d_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaEndPicture(va_dpy, va_context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    /* Prepare next one... */
    int sid_new = (sid + 1) % SURFACE_NUM;
    DebugLog(("%s: new Surface ID = %d\n", __FUNCTION__, sid_new));
    va_status = vaBeginPicture(va_dpy, va_context_id, va_surface_id[sid_new]);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    /* Get decoded data */
    va_status = vaSyncSurface(va_dpy, va_surface_id[sid]);
    CHECK_VASTATUS(va_status, "vaSyncSurface");
    CHECK_SURF(va_surface_id[sid]);

    curr_surface = va_surface_id[sid];

    sid = sid_new;
    field_order_count += 2;
    ++frame_id;
    if (frame_id > 15)
        frame_id = 0;

    ++num_frames;

    memcpy(&va_old_picture_h264, &va_picture_h264, sizeof(VAPictureH264));
}
int ff_vaapi_decode_issue(AVCodecContext *avctx, VAAPIDecodePicture *pic)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    VAStatus vas;
    int err;

    av_log(avctx, AV_LOG_DEBUG, "Decode to surface %#x.\n",
           pic->output_surface);

    vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
                         pic->output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to begin picture decode "
               "issue: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          pic->param_buffers, pic->nb_param_buffers);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to upload decode "
               "parameters: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          pic->slice_buffers, 2 * pic->nb_slices);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to upload slices: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_with_picture;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
               "issue: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
            goto fail;
        else
            goto fail_at_end;
    }

    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
        ff_vaapi_decode_destroy_buffers(avctx, pic);

    pic->nb_param_buffers = 0;
    pic->nb_slices        = 0;
    pic->slices_allocated = 0;
    av_freep(&pic->slice_buffers);

    return 0;

fail_with_picture:
    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
               "after error: %d (%s).\n", vas, vaErrorStr(vas));
    }
fail:
    ff_vaapi_decode_destroy_buffers(avctx, pic);
fail_at_end:
    return err;
}
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                           ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x      = 0,
            .y      = 0,
            .width  = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            vaapi_proc_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags = VA_FRAME_PICTURE;

        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters = &ctx->filter_buffer;
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;
        } else {
            params.filters = NULL;
            params.num_filters = 0;
        }

        vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
                             output_surface);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }

        vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                             VAProcPipelineParameterBufferType,
                             sizeof(params), 1, &params, &params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }
        av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
               params_id);

        vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                              &params_id, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }

        vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_render;
        }

        if (ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    ctx->va_config     = VA_INVALID_ID;
    ctx->va_context    = VA_INVALID_ID;
    ctx->filter_buffer = VA_INVALID_ID;
    ctx->valid_ids     = 1;

    return 0;
}