/**
 * Configure the scaler's output link: validate the requested output
 * format and size against the hardware constraints for a VideoProc
 * config, create the output frame pool and the VAAPI processing context.
 *
 * Returns 0 on success or a negative AVERROR code; on failure all
 * partially-created output state is released via the common fail path.
 */
static int scale_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    ScaleVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    scale_vaapi_pipeline_uninit(ctx);

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        // Fix: previously unchecked; a failed ref would be dereferenced
        // on the next line.
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    // Default the output format to the input format when unset.
    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(ctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width, constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->output_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        // Fix: previously returned AVERROR(EIO) directly here, leaking
        // hwconfig and constraints and leaving output_frames_ref set.
        // Route through the common cleanup path instead.
        err = AVERROR(EIO);
        goto fail;
    }

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
/*
 * Set *va_config and the frames_ref fields from the current codec parameters
 * in avctx.
 *
 * NOTE(review): assumes the caller passes *va_config == VA_INVALID_ID on
 * entry (the fail path inspects it) — confirm against callers.
 */
static int vaapi_decode_make_config(AVCodecContext *avctx,
                                    AVBufferRef *device_ref,
                                    VAConfigID *va_config,
                                    AVBufferRef *frames_ref)
{
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    VAStatus vas;
    int err, i, j;
    const AVCodecDescriptor *codec_desc;
    VAProfile profile, va_profile, *profile_list = NULL;
    int profile_count, exact_match, matched_alt, alt_profile;
    const AVPixFmtDescriptor *sw_desc, *desc;

    AVHWDeviceContext    *device = (AVHWDeviceContext*)device_ref->data;
    AVVAAPIDeviceContext *hwctx  = device->hwctx;

    codec_desc = avcodec_descriptor_get(avctx->codec_id);
    if (!codec_desc) {
        err = AVERROR(EINVAL);
        goto fail;
    }

    profile_count = vaMaxNumProfiles(hwctx->display);
    profile_list  = av_malloc_array(profile_count, sizeof(VAProfile));
    if (!profile_list) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    vas = vaQueryConfigProfiles(hwctx->display,
                                profile_list, &profile_count);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query profiles: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(ENOSYS);
        goto fail;
    }

    // Find a VA profile matching the codec: prefer an exact codec-profile
    // match; otherwise remember the last hardware-supported alternative.
    profile     = VAProfileNone;
    exact_match = 0;
    // Fix: previously alt_profile/va_profile could be read uninitialized
    // when the codec id matched map entries but none of the mapped VA
    // profiles was supported by the hardware.  Track explicitly whether
    // an alternative was recorded.
    matched_alt = 0;
    va_profile  = VAProfileNone;
    alt_profile = 0;

    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_profile_map); i++) {
        int profile_match = 0;
        if (avctx->codec_id != vaapi_profile_map[i].codec_id)
            continue;
        if (avctx->profile == vaapi_profile_map[i].codec_profile)
            profile_match = 1;
        profile = vaapi_profile_map[i].va_profile;
        for (j = 0; j < profile_count; j++) {
            if (profile == profile_list[j]) {
                exact_match = profile_match;
                break;
            }
        }
        if (j < profile_count) {
            if (exact_match)
                break;
            matched_alt = 1;
            alt_profile = vaapi_profile_map[i].codec_profile;
            va_profile  = vaapi_profile_map[i].va_profile;
        }
    }
    av_freep(&profile_list);

    if (profile == VAProfileNone) {
        av_log(avctx, AV_LOG_ERROR, "No support for codec %s "
               "profile %d.\n", codec_desc->name, avctx->profile);
        err = AVERROR(ENOSYS);
        goto fail;
    }
    if (!exact_match) {
        if (matched_alt &&
            (avctx->hwaccel_flags & AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH)) {
            av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
                   "supported for hardware decode.\n",
                   codec_desc->name, avctx->profile);
            av_log(avctx, AV_LOG_WARNING, "Using possibly-"
                   "incompatible profile %d instead.\n",
                   alt_profile);
            profile = va_profile;
        } else {
            av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
                   "supported for hardware decode.\n",
                   codec_desc->name, avctx->profile);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    vas = vaCreateConfig(hwctx->display, profile,
                         VAEntrypointVLD, NULL, 0,
                         va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
               "configuration: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = *va_config;

    constraints = av_hwdevice_get_hwframe_constraints(device_ref, hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (avctx->coded_width  < constraints->min_width  ||
        avctx->coded_height < constraints->min_height ||
        avctx->coded_width  > constraints->max_width  ||
        avctx->coded_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support image "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               avctx->coded_width, avctx->coded_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }
    if (!constraints->valid_sw_formats ||
        constraints->valid_sw_formats[0] == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not offer any "
               "usable surface formats.\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    if (frames_ref) {
        AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;

        frames->format = AV_PIX_FMT_VAAPI;
        frames->width  = avctx->coded_width;
        frames->height = avctx->coded_height;

        // Find the first format in the list which matches the expected
        // bit depth and subsampling.  If none are found (this can happen
        // when 10-bit streams are decoded to 8-bit surfaces, for example)
        // then just take the first format on the list.
        frames->sw_format = constraints->valid_sw_formats[0];
        sw_desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            desc = av_pix_fmt_desc_get(constraints->valid_sw_formats[i]);
            if (desc->nb_components != sw_desc->nb_components ||
                desc->log2_chroma_w != sw_desc->log2_chroma_w ||
                desc->log2_chroma_h != sw_desc->log2_chroma_h)
                continue;
            for (j = 0; j < desc->nb_components; j++) {
                if (desc->comp[j].depth != sw_desc->comp[j].depth)
                    break;
            }
            if (j < desc->nb_components)
                continue;
            frames->sw_format = constraints->valid_sw_formats[i];
            break;
        }

        frames->initial_pool_size = 1;
        // Add per-codec number of surfaces used for storing reference frames.
        switch (avctx->codec_id) {
        case AV_CODEC_ID_H264:
        case AV_CODEC_ID_HEVC:
            frames->initial_pool_size += 16;
            break;
        case AV_CODEC_ID_VP9:
            frames->initial_pool_size += 8;
            break;
        case AV_CODEC_ID_VP8:
            frames->initial_pool_size += 3;
            break;
        default:
            frames->initial_pool_size += 2;
        }
    }

    av_hwframe_constraints_free(&constraints);
    av_freep(&hwconfig);

    return 0;

fail:
    av_hwframe_constraints_free(&constraints);
    av_freep(&hwconfig);
    if (*va_config != VA_INVALID_ID) {
        vaDestroyConfig(hwctx->display, *va_config);
        *va_config = VA_INVALID_ID;
    }
    // av_freep() is NULL-safe and NULLs the pointer, so this cannot
    // double-free after the mid-function free above.
    av_freep(&profile_list);
    return err;
}
/**
 * Common VAAPI encoder initialisation: take a reference to the input
 * hardware frames context, create the encode VA config/context, the
 * reconstructed-frame pool, and the per-codec parameter buffers.
 *
 * On any failure all state is torn down via ff_vaapi_encode_close().
 */
av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
                                 const VAAPIEncodeType *type)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    AVVAAPIFramesContext *recon_hwctx = NULL;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    enum AVPixelFormat recon_format;
    VAStatus vas;
    int err, i;

    if (!avctx->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the encoding device.\n");
        return AVERROR(EINVAL);
    }

    ctx->codec = type;
    ctx->codec_options = ctx->codec_options_data;

    ctx->priv_data = av_mallocz(type->priv_data_size);
    if (!ctx->priv_data) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->input_frames_ref = av_buffer_ref(avctx->hw_frames_ctx);
    if (!ctx->input_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->device = (AVHWDeviceContext*)ctx->device_ref->data;
    ctx->hwctx = ctx->device->hwctx;

    // Per-codec setup must run first: it chooses va_profile/va_entrypoint
    // and the config attributes used below.
    err = ctx->codec->init(avctx);
    if (err < 0)
        goto fail;

    vas = vaCreateConfig(ctx->hwctx->display,
                         ctx->va_profile, ctx->va_entrypoint,
                         ctx->config_attributes, ctx->nb_config_attributes,
                         &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "configuration: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    // Probably we can use the input surface format as the surface format
    // of the reconstructed frames.  If not, we just pick the first (only?)
    // format in the valid list and hope that it all works.
    recon_format = AV_PIX_FMT_NONE;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->input_frames->sw_format ==
                constraints->valid_sw_formats[i]) {
                recon_format = ctx->input_frames->sw_format;
                break;
            }
        }
        if (recon_format == AV_PIX_FMT_NONE) {
            // Fix: fall back to the FIRST supported format.  Indexing
            // with i here would read the AV_PIX_FMT_NONE terminator the
            // loop just stopped on, leaving recon_format unset.
            recon_format = constraints->valid_sw_formats[0];
        }
    } else {
        // No idea what to use; copy input format.
        recon_format = ctx->input_frames->sw_format;
    }
    av_log(avctx, AV_LOG_DEBUG, "Using %s as format of "
           "reconstructed frames.\n", av_get_pix_fmt_name(recon_format));

    if (ctx->aligned_width  < constraints->min_width  ||
        ctx->aligned_height < constraints->min_height ||
        ctx->aligned_width  > constraints->max_width  ||
        ctx->aligned_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support encoding at "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->aligned_width, ctx->aligned_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);

    ctx->recon_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->recon_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->recon_frames = (AVHWFramesContext*)ctx->recon_frames_ref->data;

    ctx->recon_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->recon_frames->sw_format = recon_format;
    ctx->recon_frames->width     = ctx->aligned_width;
    ctx->recon_frames->height    = ctx->aligned_height;
    ctx->recon_frames->initial_pool_size = ctx->nb_recon_frames;

    err = av_hwframe_ctx_init(ctx->recon_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise reconstructed "
               "frame context: %d.\n", err);
        goto fail;
    }
    recon_hwctx = ctx->recon_frames->hwctx;

    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->aligned_width, ctx->aligned_height,
                          VA_PROGRESSIVE,
                          recon_hwctx->surface_ids,
                          recon_hwctx->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create encode pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    ctx->input_order  = 0;
    ctx->output_delay = avctx->max_b_frames;
    ctx->decode_delay = 1;
    ctx->output_order = - ctx->output_delay - 1;

    if (ctx->codec->sequence_params_size > 0) {
        ctx->codec_sequence_params =
            av_mallocz(ctx->codec->sequence_params_size);
        if (!ctx->codec_sequence_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    if (ctx->codec->picture_params_size > 0) {
        ctx->codec_picture_params =
            av_mallocz(ctx->codec->picture_params_size);
        if (!ctx->codec_picture_params) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    if (ctx->codec->init_sequence_params) {
        err = ctx->codec->init_sequence_params(avctx);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Codec sequence initialisation "
                   "failed: %d.\n", err);
            goto fail;
        }
    }

    // All I are IDR for now.
    ctx->i_per_idr = 0;
    ctx->p_per_i = ((avctx->gop_size + avctx->max_b_frames) /
                    (avctx->max_b_frames + 1));
    ctx->b_per_p = avctx->max_b_frames;

    // This should be configurable somehow.  (Needs testing on a machine
    // where it actually overlaps properly, though.)
    ctx->issue_mode = ISSUE_MODE_MAXIMISE_THROUGHPUT;

    return 0;

fail:
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    ff_vaapi_encode_close(avctx);
    return err;
}
/**
 * Configure the deinterlacer output link: create the VAAPI processing
 * config/context and output frame pool at the input size, build the
 * deinterlace filter parameters, and set the output timing for
 * field-rate output.
 *
 * Returns 0 on success or a negative AVERROR code on failure.
 */
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink = avctx->inputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        // Fix: previously unchecked; a failed ref would be dereferenced
        // on the next line.
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    // Deinterlacing does not change the frame size.
    ctx->output_width  = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width, constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->input_frames->sw_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    // Field-rate output doubles the frame rate (and halves the tick)
    // when ctx->field_rate is 2.
    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}