void ofxGstRTPServer::emitDepthKeyFrame(){ GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline())); gst_object_ref(clock); GstClockTime time = gst_clock_get_time (clock); GstClockTime now = time - gst_element_get_base_time(gst.getPipeline()); gst_object_unref (clock); GstEvent * keyFrameEvent = gst_video_event_new_downstream_force_key_unit(now, time, now, TRUE, 0); gst_element_send_event(appSrcDepth,keyFrameEvent); }
/* Convert a pending *upstream* force-key-unit event into the matching
 * *downstream* event once the stream has reached the requested running time
 * and the current buffer is a keyframe.
 *
 * pending_event:       queued upstream force-key-unit event (may be NULL)
 * segment:             current segment, used to translate timestamps
 * timestamp:           PTS of the buffer being considered
 * flags:               GstBufferFlags of that buffer
 * pending_key_unit_ts: running time at which the key unit was requested
 *
 * Returns a new downstream force-key-unit event (caller owns it), carrying
 * the seqnum of the pending event, or NULL if it is not time yet / the
 * buffer is a delta frame. */
static GstEvent * check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment, GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
{
  GstClockTime running_time, stream_time;
  gboolean all_headers;
  guint count;
  GstEvent *event = NULL;

  /* NOTE(review): g_return_val_if_fail already bails on NULL pending_event,
   * so the explicit NULL check below is redundant (harmless) belt-and-braces. */
  g_return_val_if_fail (pending_event != NULL, NULL);
  g_return_val_if_fail (segment != NULL, NULL);

  if (pending_event == NULL)
    goto out;

  /* a valid target time but no usable buffer timestamp: can't decide yet */
  if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
      timestamp == GST_CLOCK_TIME_NONE)
    goto out;

  running_time = gst_segment_to_running_time (segment,
      GST_FORMAT_TIME, timestamp);

  GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
      GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));

  /* requested time not reached yet */
  if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
      running_time < pending_key_unit_ts)
    goto out;

  /* delta frame: keep waiting until an actual keyframe arrives */
  if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
    GST_INFO ("pending force key unit, waiting for keyframe");
    goto out;
  }

  stream_time = gst_segment_to_stream_time (segment,
      GST_FORMAT_TIME, timestamp);

  /* copy all_headers/count from the upstream request into the downstream event */
  gst_video_event_parse_upstream_force_key_unit (pending_event,
      NULL, &all_headers, &count);

  event = gst_video_event_new_downstream_force_key_unit (timestamp,
      stream_time, running_time, all_headers, count);
  /* preserve the seqnum so up/downstream events can be correlated */
  gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));

out:
  return event;
}
/* GstAppSrc need-data callback: feed the encoder's appsrc from the source's
 * ring of GstSamples.
 *
 * Spins (50 ms sleeps) until the source has produced a buffer beyond the one
 * this stream last consumed. On the first buffer it installs the caps on the
 * appsrc; for non-video streams it clears stream->encoder so the forced
 * key-unit logic below is skipped. When the accumulated buffer duration
 * crosses encoder->segment_duration it pushes a downstream force-key-unit
 * event before the buffer. Signals EOS to the appsrc when the source has
 * ended and no newer buffer exists.
 *
 * FIXES vs. previous version:
 *  - gst_caps_to_string() allocates; it was called twice and both results
 *    leaked. Now called once and g_free'd.
 *  - gst_element_get_static_pad() returns a reference; it was never
 *    unreffed, leaking a pad ref per forced key unit. Now unreffed.
 */
static void
need_data_callback (GstAppSrc *src, guint length, gpointer user_data)
{
  EncoderStream *stream = (EncoderStream *)user_data;
  gint current_position;
  GstBuffer *buffer;
  GstPad *pad;
  GstEvent *event;

  current_position = (stream->current_position + 1) % SOURCE_RING_SIZE;
  for (;;) {
    if (stream->state != NULL) {
      stream->state->last_heartbeat = gst_clock_get_time (stream->system_clock);
    }
    /* ensure next buffer isn't current buffer */
    if ((current_position == stream->source->current_position) ||
        stream->source->current_position == -1) {
      if ((current_position == stream->source->current_position) &&
          stream->source->eos) {
        GstFlowReturn ret;

        ret = gst_app_src_end_of_stream (src);
        GST_INFO ("EOS of source %s, tell encoder %s, return %s",
            stream->source->name, stream->name, gst_flow_get_name (ret));
        break;
      }
      GST_DEBUG ("waiting %s source ready", stream->name);
      g_usleep (50000); /* waiting 50ms */
      continue;
    }

    /* first buffer, set caps. */
    if (stream->current_position == -1) {
      GstCaps *caps;
      gchar *caps_str;

      caps = gst_sample_get_caps (stream->source->ring[current_position]);
      gst_app_src_set_caps (src, caps);
      caps_str = gst_caps_to_string (caps);
      if (!g_str_has_prefix (caps_str, "video")) {
        /* only for video stream, force key unit */
        stream->encoder = NULL;
      }
      GST_INFO ("set stream %s caps: %s", stream->name, caps_str);
      g_free (caps_str); /* gst_caps_to_string allocates; was leaked twice */
    }

    buffer = gst_sample_get_buffer (stream->source->ring[current_position]);
    GST_DEBUG ("%s encoder position %d; timestamp %" GST_TIME_FORMAT " source position %d",
        stream->name,
        stream->current_position,
        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
        stream->source->current_position);

    /* force key unit? */
    if ((stream->encoder != NULL) && (stream->encoder->segment_duration != 0)) {
      if (stream->encoder->duration_accumulation >= stream->encoder->segment_duration) {
        GstClockTime running_time;

        stream->encoder->last_segment_duration = stream->encoder->duration_accumulation;
        running_time = GST_BUFFER_PTS (buffer);
        pad = gst_element_get_static_pad ((GstElement *)src, "src");
        event = gst_video_event_new_downstream_force_key_unit (running_time,
            running_time, running_time, TRUE, stream->encoder->force_key_count);
        gst_pad_push_event (pad, event);
        gst_object_unref (pad); /* static pad is returned with a ref; was leaked */
        stream->encoder->force_key_count++;
        stream->encoder->duration_accumulation = 0;
      }
      stream->encoder->duration_accumulation += GST_BUFFER_DURATION (buffer);
    }

    /* push buffer; appsrc takes ownership of the extra ref */
    if (gst_app_src_push_buffer (src, gst_buffer_ref (buffer)) != GST_FLOW_OK) {
      GST_ERROR ("%s, gst_app_src_push_buffer failure.", stream->name);
    }

    if (stream->state != NULL) {
      stream->state->current_timestamp = GST_BUFFER_PTS (buffer);
    }
    break;
  }

  stream->current_position = current_position;
}
/* In-place video-filter hook: detect scene changes by comparing each frame
 * against the previous one and, on a detected cut, push a downstream
 * force-key-unit event so the encoder starts a new keyframe.
 *
 * Keeps a sliding window of SC_N_DIFFS inter-frame difference scores and
 * derives an adaptive threshold from the window's min/max. The frame data
 * itself is never modified; only scenechange state and the pushed event are
 * side effects. Returns GST_FLOW_OK, or GST_FLOW_ERROR if the kept previous
 * buffer cannot be mapped. */
static GstFlowReturn
gst_scene_change_transform_frame_ip (GstVideoFilter * filter, GstVideoFrame * frame)
{
  GstSceneChange *scenechange = GST_SCENE_CHANGE (filter);
  GstVideoFrame oldframe;
  double score_min;
  double score_max;
  double threshold;
  double score;
  gboolean change;
  gboolean ret;
  int i;

  GST_DEBUG_OBJECT (scenechange, "transform_frame_ip");

  /* first frame: nothing to compare against yet; just remember it */
  if (!scenechange->oldbuf) {
    scenechange->n_diffs = 0;
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->oldbuf = gst_buffer_ref (frame->buffer);
    memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));
    return GST_FLOW_OK;
  }

  ret = gst_video_frame_map (&oldframe,
      &scenechange->oldinfo, scenechange->oldbuf, GST_MAP_READ);
  if (!ret) {
    GST_ERROR_OBJECT (scenechange, "failed to map old video frame");
    return GST_FLOW_ERROR;
  }

  score = get_frame_score (&oldframe, frame);

  gst_video_frame_unmap (&oldframe);

  /* replace the kept previous frame with the current one */
  gst_buffer_unref (scenechange->oldbuf);
  scenechange->oldbuf = gst_buffer_ref (frame->buffer);
  memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));

  /* shift the score window left and append the new score at the end */
  memmove (scenechange->diffs, scenechange->diffs + 1,
      sizeof (double) * (SC_N_DIFFS - 1));
  scenechange->diffs[SC_N_DIFFS - 1] = score;
  scenechange->n_diffs++;

  score_min = scenechange->diffs[0];
  score_max = scenechange->diffs[0];
  /* NOTE(review): the loop bound SC_N_DIFFS - 1 excludes the newest score
   * (diffs[SC_N_DIFFS - 1], just written above) from min/max — presumably
   * deliberate, so the threshold reflects history only; confirm against
   * upstream before "fixing". */
  for (i = 1; i < SC_N_DIFFS - 1; i++) {
    score_min = MIN (score_min, scenechange->diffs[i]);
    score_max = MAX (score_max, scenechange->diffs[i]);
  }

  /* adaptive threshold: stretched span of recent scores */
  threshold = 1.8 * score_max - 0.8 * score_min;

  /* heuristic decision; magic numbers are empirically chosen cutoffs */
  if (scenechange->n_diffs > 2) {
    if (score < 5) {
      /* very small difference: never a cut */
      change = FALSE;
    } else if (score / threshold < 1.0) {
      change = FALSE;
    } else if (score / threshold > 2.5) {
      /* far above the adaptive threshold: definitely a cut */
      change = TRUE;
    } else if (score > 50) {
      change = TRUE;
    } else {
      change = FALSE;
    }
  } else {
    /* too little history to judge */
    change = FALSE;
  }

#ifdef TESTING
  /* compare against ground-truth shot list when built for testing */
  if (change != is_shot_change (scenechange->n_diffs)) {
    g_print ("%d %g %g %g %d\n", scenechange->n_diffs,
        score / threshold, score, threshold, change);
  }
#endif

  if (change) {
    GstEvent *event;

    GST_INFO_OBJECT (scenechange, "%d %g %g %g %d",
        scenechange->n_diffs, score / threshold, score, threshold, change);

    /* ask the downstream encoder for a keyframe at this buffer's PTS */
    event = gst_video_event_new_downstream_force_key_unit (GST_BUFFER_PTS (frame->buffer),
        GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, scenechange->count++);

    gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (scenechange), event);
  }

  return GST_FLOW_OK;
}