static void
gst_vp9_enc_init (GstVP9Enc * gst_vp9_enc)
{
  vpx_codec_err_t status;
  GstVPXEnc *gst_vpx_enc = GST_VPX_ENC (gst_vp9_enc);

  GST_DEBUG_OBJECT (gst_vp9_enc, "gst_vp9_enc_init");

  status =
      vpx_codec_enc_config_default (gst_vp9_enc_get_algo (gst_vpx_enc),
      &gst_vpx_enc->cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ERROR_OBJECT (gst_vpx_enc,
        "Failed to get default encoder configuration: %s",
        gst_vpx_error_name (status));
    gst_vpx_enc->have_default_config = FALSE;
  } else {
    gst_vpx_enc->have_default_config = TRUE;
  }
}
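/* Hedged sketch (assumption, not part of the original excerpt):
 * gst_vp9_enc_get_algo() is called above but defined elsewhere.  A minimal
 * implementation would simply return the libvpx VP9 encoder interface
 * (vpx_codec_vp9_cx() from <vpx/vp8cx.h>), roughly like this: */
static vpx_codec_iface_t *
gst_vp9_enc_get_algo (GstVPXEnc * enc)
{
  return vpx_codec_vp9_cx ();
}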
static GstFlowReturn
gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
    GstVideoCodecFrame * frame)
{
  GstVP8Enc *encoder;
  GstVPXEnc *vpx_enc;
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVP8EncUserData *user_data = gst_video_codec_frame_get_user_data (frame);
  GList *l;
  gint inv_count;
  GstVideoInfo *info;

  GST_DEBUG_OBJECT (video_encoder, "pre_push");

  encoder = GST_VP8_ENC (video_encoder);
  vpx_enc = GST_VPX_ENC (encoder);

  info = &vpx_enc->input_state->info;

  g_assert (user_data != NULL);

  /* Push any invisible (alt-ref) frames collected for this frame ahead of
   * the visible frame itself. */
  for (inv_count = 0, l = user_data->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    /* FIXME : All of this should have already been handled by base classes, no ? */
    if (l == user_data->invisible
        && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    /* Invisible frames are decode-only and share the visible frame's timestamp */
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
    GST_BUFFER_DURATION (buf) = 0;
    if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
      GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
    } else {
      GST_BUFFER_OFFSET_END (buf) =
          _to_granulepos (frame->presentation_frame_number + 1,
          inv_count, encoder->keyframe_distance);
      GST_BUFFER_OFFSET (buf) =
          gst_util_uint64_scale (frame->presentation_frame_number + 1,
          GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
          GST_VIDEO_INFO_FPS_N (info));
    }

    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->output_buffer;

  /* FIXME : All of this should have already been handled by base classes, no ? */
  if (!user_data->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
  } else {
    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1, 0,
        encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
        GST_VIDEO_INFO_FPS_N (info));
  }

  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

done:
  return ret;
}
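/* Hedged sketch (assumption, not part of the original excerpt): the
 * _to_granulepos() helper used in gst_vp8_enc_pre_push() above is defined
 * elsewhere.  For VP8 in Ogg the granulepos conventionally packs the
 * frame-end number in the upper bits together with the invisible-frame
 * count and the distance to the last keyframe; a plausible layout, treated
 * here as an assumption, looks like this: */
static guint64
_to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
{
  guint64 granulepos;
  guint inv;

  /* 0x3 marks "no invisible frames"; otherwise store inv_count - 1 */
  inv = (inv_count == 0) ? 0x3 : inv_count - 1;

  granulepos = (frame_end_number << 32) | (inv << 30) | (keyframe_dist << 3);
  return granulepos;
}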