static gboolean
gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder;

  GST_DEBUG_OBJECT (base_video_encoder, "stop");

  encoder = GST_VP8_ENC (base_video_encoder);

  if (encoder->inited) {
    vpx_codec_destroy (&encoder->encoder);
    encoder->inited = FALSE;
  }

  if (encoder->first_pass_cache_content) {
    g_byte_array_free (encoder->first_pass_cache_content, TRUE);
    encoder->first_pass_cache_content = NULL;
  }

  if (encoder->last_pass_cache_content.buf) {
    g_free (encoder->last_pass_cache_content.buf);
    encoder->last_pass_cache_content.buf = NULL;
    encoder->last_pass_cache_content.sz = 0;
  }

  gst_tag_setter_reset_tags (GST_TAG_SETTER (encoder));

  return TRUE;
}
static void
gst_vp8_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVP8Enc *gst_vp8_enc;

  g_return_if_fail (GST_IS_VP8_ENC (object));
  gst_vp8_enc = GST_VP8_ENC (object);

  GST_DEBUG_OBJECT (object, "gst_vp8_enc_set_property");
  switch (prop_id) {
    case PROP_BITRATE:
      gst_vp8_enc->bitrate = g_value_get_int (value);
      break;
    case PROP_MODE:
      gst_vp8_enc->mode = g_value_get_enum (value);
      break;
    case PROP_QUALITY:
      gst_vp8_enc->quality = g_value_get_double (value);
      break;
    case PROP_ERROR_RESILIENT:
      gst_vp8_enc->error_resilient = g_value_get_boolean (value);
      break;
    case PROP_MAX_LATENCY:
      gst_vp8_enc->max_latency = g_value_get_int (value);
      break;
    case PROP_MAX_KEYFRAME_DISTANCE:
      gst_vp8_enc->max_keyframe_distance = g_value_get_int (value);
      break;
    case PROP_SPEED:
      gst_vp8_enc->speed = g_value_get_int (value);
      break;
    case PROP_THREADS:
      gst_vp8_enc->threads = g_value_get_int (value);
      break;
    case PROP_MULTIPASS_MODE:
      gst_vp8_enc->multipass_mode = g_value_get_enum (value);
      break;
    case PROP_MULTIPASS_CACHE_FILE:
      g_free (gst_vp8_enc->multipass_cache_file);
      gst_vp8_enc->multipass_cache_file = g_value_dup_string (value);
      break;
    case PROP_AUTO_ALT_REF_FRAMES:
      gst_vp8_enc->auto_alt_ref_frames = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static GstFlowReturn
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  const GstVideoState *state;
  vpx_codec_err_t status;
  int flags = 0;
  vpx_image_t *image;
  GstVP8EncCoderHook *hook;
  int quality;

  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");

  encoder = GST_VP8_ENC (base_video_encoder);
  state = gst_base_video_encoder_get_state (base_video_encoder);
  encoder->n_frames++;

  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
      state->height);

  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);

  hook = g_slice_new0 (GstVP8EncCoderHook);
  hook->image = image;
  frame->coder_hook = hook;
  frame->coder_hook_destroy_notify =
      (GDestroyNotify) gst_vp8_enc_coder_hook_free;

  if (frame->force_keyframe) {
    flags |= VPX_EFLAG_FORCE_KF;
  }

  quality = (encoder->speed == 0) ? VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY;

  status = vpx_codec_encode (&encoder->encoder, image,
      encoder->n_frames, 1, flags, quality);
  if (status != 0) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
        ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
    g_slice_free (vpx_image_t, image);
    /* propagate a real error; returning FALSE here would read as GST_FLOW_OK */
    return GST_FLOW_ERROR;
  }

  return gst_vp8_enc_process (encoder);
}
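/* Hedged sketch, not part of the original excerpt: the coder hook type and
 * its destroy notify used by handle_frame above are assumed to look roughly
 * like this, based on how the `image` and `invisible` fields are used in
 * handle_frame and shape_output. Field layout and naming are assumptions. */
typedef struct
{
  vpx_image_t *image;           /* wrapped input image, freed once encoded */
  GList *invisible;             /* GstBuffers for invisible (alt-ref) frames */
} GstVP8EncCoderHook;

static void
gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
{
  if (hook->image)
    g_slice_free (vpx_image_t, hook->image);

  /* drop any invisible-frame buffers that were never pushed */
  g_list_free_full (hook->invisible, (GDestroyNotify) gst_buffer_unref);
  g_slice_free (GstVP8EncCoderHook, hook);
}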
static void
gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVP8Enc *gst_vp8_enc;

  g_return_if_fail (GST_IS_VP8_ENC (object));
  gst_vp8_enc = GST_VP8_ENC (object);

  switch (prop_id) {
    case PROP_BITRATE:
      g_value_set_int (value, gst_vp8_enc->bitrate);
      break;
    case PROP_MODE:
      g_value_set_enum (value, gst_vp8_enc->mode);
      break;
    case PROP_QUALITY:
      g_value_set_double (value, gst_vp8_enc->quality);
      break;
    case PROP_ERROR_RESILIENT:
      g_value_set_boolean (value, gst_vp8_enc->error_resilient);
      break;
    case PROP_MAX_LATENCY:
      g_value_set_int (value, gst_vp8_enc->max_latency);
      break;
    case PROP_MAX_KEYFRAME_DISTANCE:
      g_value_set_int (value, gst_vp8_enc->max_keyframe_distance);
      break;
    case PROP_SPEED:
      g_value_set_int (value, gst_vp8_enc->speed);
      break;
    case PROP_THREADS:
      g_value_set_int (value, gst_vp8_enc->threads);
      break;
    case PROP_MULTIPASS_MODE:
      g_value_set_enum (value, gst_vp8_enc->multipass_mode);
      break;
    case PROP_MULTIPASS_CACHE_FILE:
      g_value_set_string (value, gst_vp8_enc->multipass_cache_file);
      break;
    case PROP_AUTO_ALT_REF_FRAMES:
      g_value_set_boolean (value, gst_vp8_enc->auto_alt_ref_frames);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_vp8_enc_finalize (GObject * object)
{
  GstVP8Enc *gst_vp8_enc;

  GST_DEBUG_OBJECT (object, "finalize");

  g_return_if_fail (GST_IS_VP8_ENC (object));
  gst_vp8_enc = GST_VP8_ENC (object);

  g_free (gst_vp8_enc->multipass_cache_file);
  gst_vp8_enc->multipass_cache_file = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_vp8_enc_sink_event (GstBaseVideoEncoder * benc, GstEvent * event)
{
  GstVP8Enc *enc = GST_VP8_ENC (benc);

  if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
    GstTagList *list;
    GstTagSetter *setter = GST_TAG_SETTER (enc);
    const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);

    gst_event_parse_tag (event, &list);
    gst_tag_setter_merge_tags (setter, list, mode);
  }

  /* just peeked, baseclass handles the rest */
  return FALSE;
}
static gboolean
gst_vp8_enc_sink_event (GstPad * pad, GstEvent * event)
{
  GstVP8Enc *enc = GST_VP8_ENC (gst_pad_get_parent (pad));
  gboolean ret;

  if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
    GstTagList *list;
    GstTagSetter *setter = GST_TAG_SETTER (enc);
    const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);

    gst_event_parse_tag (event, &list);
    gst_tag_setter_merge_tags (setter, list, mode);
  }

  ret = enc->base_sink_event_func (pad, event);
  gst_object_unref (enc);

  return ret;
}
static GstFlowReturn
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder;
  int flags = 0;
  vpx_codec_err_t status;

  GST_DEBUG_OBJECT (base_video_encoder, "finish");

  encoder = GST_VP8_ENC (base_video_encoder);

  status = vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames,
      1, flags, 0);
  if (status != 0) {
    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
        gst_vpx_error_name (status));
    return GST_FLOW_ERROR;
  }

  /* dispatch remaining frames */
  gst_vp8_enc_process (encoder);

  if (encoder->multipass_mode == VPX_RC_FIRST_PASS
      && encoder->multipass_cache_file) {
    GError *err = NULL;

    if (!g_file_set_contents (encoder->multipass_cache_file,
            (const gchar *) encoder->first_pass_cache_content->data,
            encoder->first_pass_cache_content->len, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file: %s", err->message));
      g_error_free (err);
    }
  }

  return GST_FLOW_OK;
}
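/* Hedged sketch, not part of the original excerpt: gst_vpx_error_name() is
 * defined elsewhere in the plugin. A minimal implementation that would
 * satisfy the call sites above could simply wrap libvpx's own
 * error-to-string helper: */
static const char *
gst_vpx_error_name (vpx_codec_err_t status)
{
  const char *name = vpx_codec_err_to_string (status);

  return name ? name : "unknown error";
}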
static GstFlowReturn
gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  GstBuffer *buf;
  const GstVideoState *state;
  GstFlowReturn ret;
  GstVP8EncCoderHook *hook = frame->coder_hook;
  GList *l;
  gint inv_count;

  GST_DEBUG_OBJECT (base_video_encoder, "shape_output");

  encoder = GST_VP8_ENC (base_video_encoder);
  state = gst_base_video_encoder_get_state (base_video_encoder);

  g_assert (hook != NULL);

  for (inv_count = 0, l = hook->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    if (l == hook->invisible && frame->is_sync_point) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1, inv_count,
        encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * state->fps_d, state->fps_n);

    gst_buffer_set_caps (buf, GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->src_buffer;
  frame->src_buffer = NULL;

  if (!hook->invisible && frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  GST_BUFFER_OFFSET_END (buf) =
      _to_granulepos (frame->presentation_frame_number + 1, 0,
      encoder->keyframe_distance);
  GST_BUFFER_OFFSET (buf) =
      gst_util_uint64_scale (frame->presentation_frame_number + 1,
      GST_SECOND * state->fps_d, state->fps_n);

  ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (encoder, "flow error %d", ret);
  }

done:
  if (hook) {
    g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
    g_list_free (hook->invisible);
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
  }

  return ret;
}
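/* Hedged sketches, not part of the original excerpt, of two helpers used by
 * shape_output above. _to_granulepos presumably packs the Ogg/VP8 granule
 * position (frame count in the high bits, an invisible-frame counter, and
 * the distance since the last keyframe); the exact bit layout shown here is
 * an assumption. _gst_mini_object_unref0 is assumed to be a NULL-safe unref
 * wrapper suitable for g_list_foreach(). */
static guint64
_to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
{
  guint64 granulepos;
  guint inv;

  inv = (inv_count == 0) ? 0x3 : inv_count - 1;

  granulepos = (frame_end_number << 32) | (inv << 30) | (keyframe_dist << 3);
  return granulepos;
}

static void
_gst_mini_object_unref0 (GstMiniObject * obj)
{
  if (obj)
    gst_mini_object_unref (obj);
}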
static gboolean
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  const GstVideoState *state;
  vpx_codec_err_t status;
  int flags = 0;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;
  vpx_image_t *image;
  GstVP8EncCoderHook *hook;

  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");

  encoder = GST_VP8_ENC (base_video_encoder);
  state = gst_base_video_encoder_get_state (base_video_encoder);
  encoder->n_frames++;

  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
      state->height);

  if (!encoder->inited) {
    vpx_codec_enc_cfg_t cfg;

    status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
    if (status != VPX_CODEC_OK) {
      GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
          ("Failed to get default encoder configuration"),
          ("%s", gst_vpx_error_name (status)));
      return FALSE;
    }

    cfg.g_w = state->width;
    cfg.g_h = state->height;
    cfg.g_timebase.num = state->fps_d;
    cfg.g_timebase.den = state->fps_n;

    cfg.g_error_resilient = encoder->error_resilient;
    cfg.g_lag_in_frames = encoder->max_latency;
    cfg.g_threads = encoder->threads;
    cfg.rc_end_usage = encoder->mode;
    if (encoder->bitrate) {
      cfg.rc_target_bitrate = encoder->bitrate / 1000;
    } else {
      cfg.rc_min_quantizer = 63 - encoder->quality * 5.0;
      cfg.rc_max_quantizer = 63 - encoder->quality * 5.0;
      cfg.rc_target_bitrate = encoder->bitrate;
    }
    cfg.kf_mode = VPX_KF_AUTO;
    cfg.kf_min_dist = 0;
    cfg.kf_max_dist = encoder->max_keyframe_distance;

    cfg.g_pass = encoder->multipass_mode;
    if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
    } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
      GError *err = NULL;

      if (!encoder->multipass_cache_file) {
        GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
            ("No multipass cache file provided"), (NULL));
        return FALSE;
      }

      if (!g_file_get_contents (encoder->multipass_cache_file,
              (gchar **) & encoder->last_pass_cache_content.buf,
              &encoder->last_pass_cache_content.sz, &err)) {
        GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
            ("Failed to read multipass cache file provided"),
            ("%s", err->message));
        g_error_free (err);
        return FALSE;
      }
      cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
    }

    status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
        &cfg, 0);
    if (status != VPX_CODEC_OK) {
      GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
          ("Failed to initialize encoder"),
          ("%s", gst_vpx_error_name (status)));
      return FALSE;
    }

    status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, 0);
    if (status != VPX_CODEC_OK) {
      GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
          gst_vpx_error_name (status));
    }

    status = vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
        (encoder->auto_alt_ref_frames ? 1 : 0));
    if (status != VPX_CODEC_OK) {
      GST_WARNING_OBJECT (encoder,
          "Failed to set VP8E_SET_ENABLEAUTOALTREF to %d: %s",
          (encoder->auto_alt_ref_frames ? 1 : 0),
          gst_vpx_error_name (status));
    }

    gst_base_video_encoder_set_latency (base_video_encoder, 0,
        gst_util_uint64_scale (encoder->max_latency,
            state->fps_d * GST_SECOND, state->fps_n));
    encoder->inited = TRUE;
  }

  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);

  hook = g_slice_new0 (GstVP8EncCoderHook);
  hook->image = image;
  frame->coder_hook = hook;

  if (encoder->force_keyframe) {
    flags |= VPX_EFLAG_FORCE_KF;
  }

  status = vpx_codec_encode (&encoder->encoder, image,
      encoder->n_frames, 1, flags, speed_table[encoder->speed]);
  if (status != 0) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
        ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
    g_slice_free (vpx_image_t, image);
    return FALSE;
  }

  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  while (pkt != NULL) {
    GstBuffer *buffer;
    gboolean invisible;

    GST_DEBUG_OBJECT (encoder, "packet %u type %d", (guint) pkt->data.frame.sz,
        pkt->kind);

    if (pkt->kind == VPX_CODEC_STATS_PKT
        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      GST_LOG_OBJECT (encoder, "handling STATS packet");

      g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);

      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
      if (frame != NULL) {
        buffer = gst_buffer_new ();
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
        frame->src_buffer = buffer;
        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      }

      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    }

    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
    g_assert (frame != NULL);
    frame->is_sync_point = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    hook = frame->coder_hook;

    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);

    if (hook->image)
      g_slice_free (vpx_image_t, hook->image);
    hook->image = NULL;

    if (invisible) {
      hook->invisible = g_list_append (hook->invisible, buffer);
    } else {
      frame->src_buffer = buffer;
      gst_base_video_encoder_finish_frame (base_video_encoder, frame);
    }

    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  }

  return TRUE;
}
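/* Hedged sketch, not part of the original excerpt: speed_table[] used by
 * handle_frame above is assumed to map the "speed" property to libvpx
 * encoding deadlines, with index 0 being the slowest/best setting: */
static const int speed_table[] = {
  VPX_DL_BEST_QUALITY,
  VPX_DL_GOOD_QUALITY,
  VPX_DL_REALTIME,
};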
static gboolean
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder;
  GstVideoFrame *frame;
  int flags = 0;
  vpx_codec_err_t status;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;

  GST_DEBUG_OBJECT (base_video_encoder, "finish");

  encoder = GST_VP8_ENC (base_video_encoder);

  status = vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames,
      1, flags, 0);
  if (status != 0) {
    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
        gst_vpx_error_name (status));
    return FALSE;
  }

  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  while (pkt != NULL) {
    GstBuffer *buffer;
    GstVP8EncCoderHook *hook;
    gboolean invisible, keyframe;

    GST_DEBUG_OBJECT (encoder, "packet %u type %d", (guint) pkt->data.frame.sz,
        pkt->kind);

    if (pkt->kind == VPX_CODEC_STATS_PKT
        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      GST_LOG_OBJECT (encoder, "handling STATS packet");

      g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);

      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
      if (frame != NULL) {
        buffer = gst_buffer_new ();
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
        frame->src_buffer = buffer;
        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      }

      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    }

    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
    keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
    g_assert (frame != NULL);
    hook = frame->coder_hook;

    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
    frame->is_sync_point = frame->is_sync_point || keyframe;

    if (hook->image)
      g_slice_free (vpx_image_t, hook->image);
    hook->image = NULL;

    if (invisible) {
      hook->invisible = g_list_append (hook->invisible, buffer);
    } else {
      frame->src_buffer = buffer;
      gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      frame = NULL;
    }

    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  }

  if (encoder->multipass_mode == VPX_RC_FIRST_PASS
      && encoder->multipass_cache_file) {
    GError *err = NULL;

    if (!g_file_set_contents (encoder->multipass_cache_file,
            (const gchar *) encoder->first_pass_cache_content->data,
            encoder->first_pass_cache_content->len, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file: %s", err->message));
      g_error_free (err);
    }
  }

  return TRUE;
}
static GstFlowReturn
gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
    GstVideoCodecFrame * frame)
{
  GstVP8Enc *encoder;
  GstVPXEnc *vpx_enc;
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVP8EncUserData *user_data = gst_video_codec_frame_get_user_data (frame);
  GList *l;
  gint inv_count;
  GstVideoInfo *info;

  GST_DEBUG_OBJECT (video_encoder, "pre_push");

  encoder = GST_VP8_ENC (video_encoder);
  vpx_enc = GST_VPX_ENC (encoder);

  info = &vpx_enc->input_state->info;

  g_assert (user_data != NULL);

  for (inv_count = 0, l = user_data->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    /* FIXME : All of this should have already been handled by base classes, no ? */
    if (l == user_data->invisible
        && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
    GST_BUFFER_DURATION (buf) = 0;
    if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
      GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
    } else {
      GST_BUFFER_OFFSET_END (buf) =
          _to_granulepos (frame->presentation_frame_number + 1,
          inv_count, encoder->keyframe_distance);
      GST_BUFFER_OFFSET (buf) =
          gst_util_uint64_scale (frame->presentation_frame_number + 1,
          GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
          GST_VIDEO_INFO_FPS_N (info));
    }

    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->output_buffer;

  /* FIXME : All of this should have already been handled by base classes, no ? */
  if (!user_data->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
  } else {
    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1, 0,
        encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
  }

  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

done:
  return ret;
}
static gboolean
gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  GstVP8Enc *encoder;
  vpx_codec_enc_cfg_t cfg;
  vpx_codec_err_t status;
  vpx_image_t *image;
  guint8 *data = NULL;
  GstCaps *caps;
  gboolean ret;

  encoder = GST_VP8_ENC (base_video_encoder);
  GST_DEBUG_OBJECT (base_video_encoder, "set_format");

  if (encoder->inited) {
    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
    return FALSE;
  }

  status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to get default encoder configuration"),
        ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* Scale default bitrate to our size */
  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
      state->width * state->height, cfg.g_w * cfg.g_h);

  cfg.g_w = state->width;
  cfg.g_h = state->height;
  cfg.g_timebase.num = state->fps_d;
  cfg.g_timebase.den = state->fps_n;

  cfg.g_error_resilient = encoder->error_resilient;
  cfg.g_lag_in_frames = encoder->max_latency;
  cfg.g_threads = encoder->threads;
  cfg.rc_end_usage = encoder->mode;
  cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
  cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
  /* A standalone qp-min does not make any sense: with bitrate=0 and qp-min=1
   * the encoder would only use the default qp-max=63, which also gives the
   * worst possible quality. */
  if (encoder->bitrate != DEFAULT_BITRATE
      || encoder->max_quantizer != DEFAULT_MAX_QUANTIZER) {
    cfg.rc_target_bitrate = encoder->bitrate / 1000;
    cfg.rc_min_quantizer = encoder->min_quantizer;
    cfg.rc_max_quantizer = encoder->max_quantizer;
  } else {
    cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
    cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
  }
  cfg.rc_dropframe_thresh = encoder->drop_frame;
  cfg.rc_resize_allowed = encoder->resize_allowed;

  cfg.kf_mode = VPX_KF_AUTO;
  cfg.kf_min_dist = 0;
  cfg.kf_max_dist = encoder->max_keyframe_distance;

  cfg.g_pass = encoder->multipass_mode;
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
    encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
  } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
    GError *err = NULL;

    if (!encoder->multipass_cache_file) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("No multipass cache file provided"), (NULL));
      return FALSE;
    }

    if (!g_file_get_contents (encoder->multipass_cache_file,
            (gchar **) & encoder->last_pass_cache_content.buf,
            &encoder->last_pass_cache_content.sz, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("Failed to read multipass cache file provided"),
          ("%s", err->message));
      g_error_free (err);
      return FALSE;
    }
    cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
  }

  /* Must be set before vpx_codec_enc_init() to take effect; in the original
   * this assignment came after initialization and therefore had no effect. */
  cfg.g_lag_in_frames = encoder->lag_in_frames;

  status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
      &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* FIXME move this to a set_speed() function */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
      (encoder->speed == 0) ? 0 : (encoder->speed - 1));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED: %s",
        gst_vpx_error_name (status));
  }

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
      encoder->noise_sensitivity);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
      encoder->sharpness);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
      encoder->static_threshold);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
      encoder->partitions);
#if 0
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
      encoder->arnr_maxframes);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
      encoder->arnr_strength);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
      encoder->arnr_type);
#endif
#ifdef HAVE_VP8ENC_TUNING
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
      encoder->tuning);
#endif

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
      (encoder->auto_alt_ref_frames ? 1 : 0));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_SET_ENABLEAUTOALTREF to %d: %s",
        (encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
  }

  gst_base_video_encoder_set_latency (base_video_encoder, 0,
      gst_util_uint64_scale (encoder->max_latency,
          state->fps_d * GST_SECOND, state->fps_n));
  encoder->inited = TRUE;

  /* prepare cached image buffer setup */
  image = &encoder->image;
  memset (image, 0, sizeof (*image));

  image->fmt = VPX_IMG_FMT_I420;
  image->bps = 12;
  image->x_chroma_shift = image->y_chroma_shift = 1;
  image->w = image->d_w = state->width;
  image->h = image->d_h = state->height;

  image->stride[VPX_PLANE_Y] =
      gst_video_format_get_row_stride (state->format, 0, state->width);
  image->stride[VPX_PLANE_U] =
      gst_video_format_get_row_stride (state->format, 1, state->width);
  image->stride[VPX_PLANE_V] =
      gst_video_format_get_row_stride (state->format, 2, state->width);
  image->planes[VPX_PLANE_Y] =
      data + gst_video_format_get_component_offset (state->format, 0,
      state->width, state->height);
  image->planes[VPX_PLANE_U] =
      data + gst_video_format_get_component_offset (state->format, 1,
      state->width, state->height);
  image->planes[VPX_PLANE_V] =
      data + gst_video_format_get_component_offset (state->format, 2,
      state->width, state->height);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n, state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  {
    GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
    GValue array = { 0, };
    GValue value = { 0, };

    s = gst_caps_get_structure (caps, 0);

    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
    g_value_init (&value, GST_TYPE_BUFFER);

    /* Create Ogg stream-info */
    stream_hdr = gst_buffer_new_and_alloc (26);
    data = GST_BUFFER_DATA (stream_hdr);

    GST_WRITE_UINT8 (data, 0x4F);
    GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
    GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
    GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
    GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
    GST_WRITE_UINT16_BE (data + 8, state->width);
    GST_WRITE_UINT16_BE (data + 10, state->height);
    GST_WRITE_UINT24_BE (data + 12, state->par_n);
    GST_WRITE_UINT24_BE (data + 15, state->par_d);
    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
    GST_WRITE_UINT32_BE (data + 22, state->fps_d);

    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
    gst_value_set_buffer (&value, stream_hdr);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (stream_hdr);

    iface_tags =
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
    if (iface_tags) {
      vorbiscomment =
          gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_set_buffer (&value, vorbiscomment);
      gst_value_array_append_value (&array, &value);
      g_value_unset (&value);
      gst_buffer_unref (vorbiscomment);
    }

    gst_structure_set_value (s, "streamheader", &array);
    g_value_unset (&array);
  }

  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
  gst_caps_unref (caps);

  return ret;
}
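/* Hedged sketch, not part of the original excerpt: buffer_to_image, as used
 * by handle_frame, presumably copies the cached image template prepared in
 * set_format above and rebases the plane pointers (which were stored as
 * offsets relative to a NULL base) onto the actual buffer data. Exact field
 * usage is an assumption. */
static vpx_image_t *
gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
{
  vpx_image_t *image = g_slice_new (vpx_image_t);
  guint8 *data = GST_BUFFER_DATA (buffer);

  memcpy (image, &enc->image, sizeof (*image));

  image->img_data = data;
  image->planes[VPX_PLANE_Y] += (size_t) data;
  image->planes[VPX_PLANE_U] += (size_t) data;
  image->planes[VPX_PLANE_V] += (size_t) data;

  return image;
}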
static void
gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVP8Enc *gst_vp8_enc;

  g_return_if_fail (GST_IS_VP8_ENC (object));
  gst_vp8_enc = GST_VP8_ENC (object);

  switch (prop_id) {
    case PROP_BITRATE:
      g_value_set_int (value, gst_vp8_enc->bitrate);
      break;
    case PROP_MODE:
      g_value_set_enum (value, gst_vp8_enc->mode);
      break;
    case PROP_MINSECTION_PCT:
      g_value_set_uint (value, gst_vp8_enc->minsection_pct);
      break;
    case PROP_MAXSECTION_PCT:
      g_value_set_uint (value, gst_vp8_enc->maxsection_pct);
      break;
    case PROP_MIN_QUANTIZER:
      g_value_set_int (value, gst_vp8_enc->min_quantizer);
      break;
    case PROP_MAX_QUANTIZER:
      g_value_set_int (value, gst_vp8_enc->max_quantizer);
      break;
    case PROP_QUALITY:
      g_value_set_double (value, gst_vp8_enc->quality);
      break;
    case PROP_ERROR_RESILIENT:
      g_value_set_boolean (value, gst_vp8_enc->error_resilient);
      break;
    case PROP_MAX_LATENCY:
      g_value_set_int (value, gst_vp8_enc->max_latency);
      break;
    case PROP_MAX_KEYFRAME_DISTANCE:
      g_value_set_int (value, gst_vp8_enc->max_keyframe_distance);
      break;
    case PROP_SPEED:
      g_value_set_int (value, gst_vp8_enc->speed);
      break;
    case PROP_THREADS:
      g_value_set_int (value, gst_vp8_enc->threads);
      break;
    case PROP_MULTIPASS_MODE:
      g_value_set_enum (value, gst_vp8_enc->multipass_mode);
      break;
    case PROP_MULTIPASS_CACHE_FILE:
      g_value_set_string (value, gst_vp8_enc->multipass_cache_file);
      break;
    case PROP_AUTO_ALT_REF_FRAMES:
      g_value_set_boolean (value, gst_vp8_enc->auto_alt_ref_frames);
      break;
    case PROP_LAG_IN_FRAMES:
      g_value_set_uint (value, gst_vp8_enc->lag_in_frames);
      break;
    case PROP_SHARPNESS:
      g_value_set_int (value, gst_vp8_enc->sharpness);
      break;
    case PROP_NOISE_SENSITIVITY:
      g_value_set_int (value, gst_vp8_enc->noise_sensitivity);
      break;
    case PROP_TUNE:
#ifdef HAVE_VP8ENC_TUNING
      g_value_set_enum (value, gst_vp8_enc->tuning);
#else
      GST_WARNING_OBJECT (gst_vp8_enc,
          "The tuning property is unsupported by this libvpx");
#endif
      break;
    case PROP_STATIC_THRESHOLD:
      g_value_set_int (value, gst_vp8_enc->static_threshold);
      break;
    case PROP_DROP_FRAME:
      g_value_set_int (value, gst_vp8_enc->drop_frame);
      break;
    case PROP_RESIZE_ALLOWED:
      g_value_set_boolean (value, gst_vp8_enc->resize_allowed);
      break;
    case PROP_TOKEN_PARTS:
      g_value_set_int (value, gst_vp8_enc->partitions);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}