/* Chain function for the tarkin encoder sink pad (GStreamer 0.8 style:
 * events arrive wrapped in GstData).  Feeds raw video frames to the
 * tarkin analysis engine, one frame per buffer. */
static void
gst_tarkinenc_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  TarkinEnc *tarkinenc;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  tarkinenc = GST_TARKINENC (gst_pad_get_parent (pad));

  if (!tarkinenc->setup) {
    /* Encoder never configured: post an error, drop buffers but still
     * forward events so the pipeline can shut down. */
    GST_ELEMENT_ERROR (tarkinenc, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized (input is not tarkin?)"));
    if (GST_IS_BUFFER (buf))
      gst_buffer_unref (buf);
    else
      gst_pad_event_default (pad, GST_EVENT (buf));
    return;
  }

  if (GST_IS_EVENT (buf)) {
    switch (GST_EVENT_TYPE (buf)) {
      case GST_EVENT_EOS:
        /* Flush the encoder and tear down its state. */
        tarkin_analysis_framein (tarkinenc->tarkin_stream, NULL, 0, NULL);      /* EOS */
        tarkin_comment_clear (&tarkinenc->tc);
        tarkin_stream_destroy (tarkinenc->tarkin_stream);
        /* fall through: the EOS event itself is still forwarded below */
      default:
        gst_pad_event_default (pad, GST_EVENT (buf));
        break;
    }
  } else {
    gchar *data;
    gulong size;
    TarkinTime date;

    /* data to encode */
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    /* Frame time is expressed as frame_num/1 (one tick per frame). */
    date.numerator = tarkinenc->frame_num;
    date.denominator = 1;
    /* NOTE(review): size is computed but never passed to
     * tarkin_analysis_framein -- confirm the layer argument (0) and the
     * implicit frame size are what the tarkin API expects. */
    tarkin_analysis_framein (tarkinenc->tarkin_stream, data, 0, &date);
    tarkinenc->frame_num++;

    gst_buffer_unref (buf);
  }
}
/* Handle events on the source pad: navigation events get their pointer
 * coordinates rescaled from output geometry back to input geometry
 * before the event is forwarded upstream by the base class. */
static gboolean
gst_video_scale_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (trans);
  GstStructure *s;
  gdouble coord;

  GST_DEBUG_OBJECT (videoscale, "handling %s event",
      GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
    /* The event may be shared; make it writable before editing it. */
    event =
        GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));
    s = (GstStructure *) gst_event_get_structure (event);

    if (gst_structure_get_double (s, "pointer_x", &coord)) {
      gst_structure_set (s, "pointer_x", G_TYPE_DOUBLE,
          coord * videoscale->from_width / videoscale->to_width, NULL);
    }
    if (gst_structure_get_double (s, "pointer_y", &coord)) {
      gst_structure_set (s, "pointer_y", G_TYPE_DOUBLE,
          coord * videoscale->from_height / videoscale->to_height, NULL);
    }
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
}
/* Chain function for the dxr3 video sink (GStreamer 0.8 style: events
 * arrive wrapped in GstData).  Accumulates incoming buffers into
 * cur_buf and lets dxr3videosink_parse_data () consume them. */
static void
dxr3videosink_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  Dxr3VideoSink *sink;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  sink = DXR3VIDEOSINK (gst_pad_get_parent (pad));

  if (GST_IS_EVENT (buf)) {
    /* In-band event: delegate and skip the buffer path. */
    dxr3videosink_handle_event (pad, GST_EVENT (buf));
    return;
  }

/*   fprintf (stderr, "^^^^^^ Video block\n"); */

  /* FIX: read the timestamp *before* appending.  gst_buffer_append may
   * consume (unref) buf, after which dereferencing its header is a
   * use-after-free.  The original code read it afterwards. */
  sink->last_ts = GST_BUFFER_TIMESTAMP (buf);

  if (sink->cur_buf == NULL) {
    sink->cur_buf = buf;
  } else {
    sink->cur_buf = gst_buffer_append (sink->cur_buf, buf);
  }

  dxr3videosink_parse_data (sink);
}
/* Loop function for the mikmod element.  On the first iteration it
 * pulls the entire module file from the sink pad (module loaders need
 * the whole file at once), initializes libmikmod and starts playback;
 * afterwards it lets the driver produce audio until the song ends. */
static void
gst_mikmod_loop (GstElement * element)
{
  GstMikMod *mikmod;
  GstBuffer *buffer_in;

  g_return_if_fail (element != NULL);
  g_return_if_fail (GST_IS_MIKMOD (element));

  mikmod = GST_MIKMOD (element);
  srcpad = mikmod->srcpad;
  mikmod->Buffer = NULL;

  if (!mikmod->initialized) {
    /* Accumulate the complete module into a single buffer until EOS. */
    while ((buffer_in = GST_BUFFER (gst_pad_pull (mikmod->sinkpad)))) {
      if (GST_IS_EVENT (buffer_in)) {
        GstEvent *event = GST_EVENT (buffer_in);

        if (GST_EVENT_TYPE (event) == GST_EVENT_EOS)
          break;
      } else {
        if (mikmod->Buffer) {
          mikmod->Buffer = gst_buffer_append (mikmod->Buffer, buffer_in);
        } else {
          mikmod->Buffer = buffer_in;
        }
      }
    }

    if (!GST_PAD_CAPS (mikmod->srcpad)) {
      /* FIX: the original condition was inverted -- it posted a
       * negotiation error (and bailed out) when renegotiation
       * SUCCEEDED.  Error out only when the link attempt fails. */
      if (!GST_PAD_LINK_SUCCESSFUL (gst_pad_renegotiate (mikmod->srcpad))) {
        GST_ELEMENT_ERROR (mikmod, CORE, NEGOTIATION, (NULL), (NULL));
        return;
      }
    }

    MikMod_RegisterDriver (&drv_gst);
    MikMod_RegisterAllLoaders ();
    MikMod_Init ("");

    reader = GST_READER_new (mikmod);
    module = Player_LoadGeneric (reader, 64, 0);

    /* The module data has been parsed by the loader; drop our copy. */
    gst_buffer_unref (mikmod->Buffer);

    if (!Player_Active ())
      Player_Start (module);

    mikmod->initialized = TRUE;
  }

  if (Player_Active ()) {
    /* sngtime is presumably in 1/1024 s units -- TODO confirm against
     * the libmikmod documentation. */
    timestamp = (module->sngtime / 1024.0) * GST_SECOND;
    drv_gst.Update ();
  } else {
    gst_element_set_eos (GST_ELEMENT (mikmod));
    gst_pad_push (mikmod->srcpad, GST_DATA (gst_event_new (GST_EVENT_EOS)));
  }
}
static void drop_events (void) { while (events != NULL) { gst_event_unref (GST_EVENT (events->data)); events = g_list_delete_link (events, events); } }
/* Pad probe used by the test: remembers the first interesting custom
 * event seen before resp. after the queue (together with the time it
 * was observed), taking an extra ref on it.  Data always passes. */
static GstPadProbeReturn
event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstMiniObject *obj = GST_PAD_PROBE_INFO_DATA (info);
  gboolean before_q = (gboolean) GPOINTER_TO_INT (user_data);
  GstEventType type;
  gboolean wanted;

  GST_DEBUG ("event probe called %p", obj);

  fail_unless (GST_IS_EVENT (obj));

  type = GST_EVENT_TYPE (GST_EVENT (obj));

  if (before_q) {
    /* Upstream side of the queue: upstream-capable custom events. */
    wanted = (type == GST_EVENT_CUSTOM_UPSTREAM
        || type == GST_EVENT_CUSTOM_BOTH || type == GST_EVENT_CUSTOM_BOTH_OOB);
    if (wanted && got_event_before_q == NULL) {
      gst_event_ref ((GstEvent *) obj);
      g_get_current_time (&got_event_time);
      got_event_before_q = GST_EVENT (obj);
    }
  } else {
    /* Downstream side: downstream-capable custom events. */
    wanted = (type == GST_EVENT_CUSTOM_DOWNSTREAM
        || type == GST_EVENT_CUSTOM_DOWNSTREAM_OOB
        || type == GST_EVENT_CUSTOM_BOTH || type == GST_EVENT_CUSTOM_BOTH_OOB);
    if (wanted && got_event_after_q == NULL) {
      gst_event_ref ((GstEvent *) obj);
      g_get_current_time (&got_event_time);
      got_event_after_q = GST_EVENT (obj);
    }
  }

  return GST_PAD_PROBE_OK;
}
/* Source-pad event handler: for navigation events, rescale the pointer
 * coordinates from the output frame size back to the input frame size
 * (only when they differ), then chain up to the base class. */
static gboolean gst_imx_blitter_video_transform_src_event(GstBaseTransform *transform, GstEvent *event)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);

	GST_DEBUG_OBJECT(transform, "handling %s event", GST_EVENT_TYPE_NAME(event));

	if (GST_EVENT_TYPE(event) == GST_EVENT_NAVIGATION)
	{
		gint in_w = GST_VIDEO_INFO_WIDTH(&(blitter_video_transform->input_video_info));
		gint in_h = GST_VIDEO_INFO_HEIGHT(&(blitter_video_transform->input_video_info));
		gint out_w = GST_VIDEO_INFO_WIDTH(&(blitter_video_transform->output_video_info));
		gint out_h = GST_VIDEO_INFO_HEIGHT(&(blitter_video_transform->output_video_info));

		/* Only rewrite the event when in- and output geometry differ. */
		if ((in_w != out_w) || (in_h != out_h))
		{
			gdouble coord;
			GstStructure *structure;

			/* The event may be shared; make it writable before editing. */
			event = GST_EVENT(gst_mini_object_make_writable(GST_MINI_OBJECT(event)));
			structure = (GstStructure *)gst_event_get_structure(event);

			if (gst_structure_get_double(structure, "pointer_x", &coord))
			{
				gst_structure_set(
					structure,
					"pointer_x",
					G_TYPE_DOUBLE,
					coord * in_w / out_w,
					NULL
				);
			}

			if (gst_structure_get_double(structure, "pointer_y", &coord))
			{
				gst_structure_set(
					structure,
					"pointer_y",
					G_TYPE_DOUBLE,
					coord * in_h / out_h,
					NULL
				);
			}
		}
	}

	return GST_BASE_TRANSFORM_CLASS(gst_imx_blitter_video_transform_parent_class)->src_event(transform, event);
}
/* Thread entry point: push the event handed over via @data onto the
 * global mysrcpad.  Ownership of the event transfers to the pad. */
static gpointer
push_event_thread_func (gpointer data)
{
  GstEvent *ev = GST_EVENT (data);

  GST_DEBUG ("pushing event %p on pad %p", ev, mysrcpad);
  gst_pad_push_event (mysrcpad, ev);

  return NULL;
}
/* Flush all cached events downstream (ownership of each event moves to
 * the pad) and reset the cache list. */
static void
gst_icydemux_send_cached_events (GstICYDemux * icydemux)
{
  GList *walk;

  for (walk = icydemux->cached_events; walk != NULL; walk = g_list_next (walk))
    gst_pad_push_event (icydemux->srcpad, GST_EVENT (walk->data));

  g_list_free (icydemux->cached_events);
  icydemux->cached_events = NULL;
}
/* Test sink-pad event handler: compares the received CAPS event with
 * the first expected event and the received SEGMENT event with the
 * last expected event (list stored in the pad's element-private). */
static gboolean
sink_event_func (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GList **expected = GST_PAD_ELEMENT_PRIVATE (pad);
  GstEventType type = GST_EVENT_TYPE (event);

  if (type == GST_EVENT_CAPS) {
    GstCaps *recvcaps, *expectcaps;
    GstEvent *exp = GST_EVENT (g_list_first (*expected)->data);

    gst_event_parse_caps (event, &recvcaps);
    gst_event_parse_caps (exp, &expectcaps);
    fail_unless (gst_caps_is_equal (recvcaps, expectcaps));
  } else if (type == GST_EVENT_SEGMENT) {
    const GstSegment *recvseg, *expectseg;
    GstEvent *exp = GST_EVENT (g_list_last (*expected)->data);

    gst_event_parse_segment (event, &recvseg);
    gst_event_parse_segment (exp, &expectseg);
    fail_unless_equals_uint64 (recvseg->position, expectseg->position);
  }

  return gst_pad_event_default (pad, parent, event);
}
/* Push every cached event downstream in order (each push transfers the
 * event's ownership to the pad), then drop the now-empty cache. */
static void
gst_type_find_element_send_cached_events (GstTypeFindElement * typefind)
{
  GList *walk = typefind->cached_events;

  while (walk != NULL) {
    GstEvent *event = GST_EVENT (walk->data);

    GST_DEBUG_OBJECT (typefind, "sending cached %s event",
        GST_EVENT_TYPE_NAME (event));
    gst_pad_push_event (typefind->src, event);
    walk = g_list_next (walk);
  }

  g_list_free (typefind->cached_events);
  typefind->cached_events = NULL;
}
/* Source-pad event handler: feed navigation events to the 3D scene
 * (sending EOS on Escape), then chain up to the base class. */
static gboolean
gst_vr_compositor_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVRCompositor *self = GST_VR_COMPOSITOR (trans);

  GST_DEBUG_OBJECT (trans, "handling %s event", GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
    /* The event may be shared; make it writable before handing it on. */
    event =
        GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));
    gst_3d_scene_send_eos_on_esc (GST_ELEMENT (self), event);
    gst_3d_scene_navigation_event (self->scene, event);
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
}
/* Event probe on the camera source pad: drops exactly one SEGMENT
 * event after a drop has been requested via self->drop_newseg. */
static GstPadProbeReturn
gst_wrapper_camera_src_src_event_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer udata)
{
  GstWrapperCameraBinSrc *self = udata;
  GstEvent *evt = GST_EVENT (info->data);

  if (GST_EVENT_TYPE (evt) == GST_EVENT_SEGMENT && self->drop_newseg) {
    /* One-shot: clear the flag so only this segment is dropped. */
    self->drop_newseg = FALSE;
    return GST_PAD_PROBE_DROP;
  }

  return GST_PAD_PROBE_OK;
}
/* Source-pad event handler: rescales navigation pointer coordinates
 * from output to input geometry, then forwards the event upstream. */
static gboolean gst_ffmpegscale_handle_src_event(GstPad* pad, GstEvent* event) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(gst_pad_get_parent(pad));
    gboolean res;

    if (GST_EVENT_TYPE(event) == GST_EVENT_NAVIGATION) {
        GstStructure* structure;
        gdouble coord;

        /* The event may be shared; make it writable before editing. */
        event = GST_EVENT(gst_mini_object_make_writable(GST_MINI_OBJECT(event)));
        structure = (GstStructure*) gst_event_get_structure(event);

        if (gst_structure_get_double(structure, "pointer_x", &coord)) {
            gst_structure_set(structure, "pointer_x", G_TYPE_DOUBLE,
                              coord * scale->in_width / scale->out_width,
                              NULL);
        }
        if (gst_structure_get_double(structure, "pointer_y", &coord)) {
            gst_structure_set(structure, "pointer_y", G_TYPE_DOUBLE,
                              coord * scale->in_height / scale->out_height,
                              NULL);
        }
    }

    res = gst_pad_event_default(pad, event);
    gst_object_unref(scale);

    return res;
}
/* Chain function for the dxr3 subpicture sink (GStreamer 0.8 style:
 * events arrive wrapped in GstData).  Writes SPU data to the em8300
 * device, registering the buffer's PTS with the card first. */
static void
dxr3spusink_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  Dxr3SpuSink *sink;
  gint bytes_written = 0;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  sink = DXR3SPUSINK (gst_pad_get_parent (pad));

  if (GST_IS_EVENT (buf)) {
    /* In-band event: delegate and skip the buffer path entirely. */
    dxr3spusink_handle_event (pad, GST_EVENT (buf));
    return;
  }

  /* Only touch the device while it is open. */
  if (GST_OBJECT_FLAG_IS_SET (sink, DXR3SPUSINK_OPEN)) {
    /* If we have PTS information for the SPU unit, register it now.
       The card needs the PTS to be written *before* the actual data. */
    if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
      guint pts = (guint) GSTTIME_TO_MPEGTIME (GST_BUFFER_TIMESTAMP (buf));

      ioctl (sink->spu_fd, EM8300_IOCTL_SPU_SETPTS, &pts);
    }

    /* NOTE(review): a short write is only warned about, never retried
       -- confirm this is acceptable for the em8300 SPU device. */
    bytes_written = write (sink->spu_fd, GST_BUFFER_DATA (buf),
        GST_BUFFER_SIZE (buf));
    if (bytes_written < GST_BUFFER_SIZE (buf)) {
      fprintf (stderr, "dxr3spusink: Warning: %d bytes should be written,"
          " only %d bytes written\n", GST_BUFFER_SIZE (buf), bytes_written);
    }
  }

  gst_buffer_unref (buf);
}
/* Pad probe on the suboverlay source pad: verifies that subtitles are
 * actually rendered into the video frames during their expected time
 * windows, and (in descriptor-generation mode) records the spans in
 * which subtitles are present.  Data always passes (returns TRUE). */
static gboolean
probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  global_last_probe = g_get_monotonic_time ();

  if (GST_IS_BUFFER (object)) {
    GstClockTime buf_start, buf_end;
    GstBuffer *next_sub, *buf = GST_BUFFER (object);

    /* Express the buffer span in stream time using the last SEGMENT. */
    buf_start =
        gst_segment_to_stream_time (&glob_suboverlay_src_probe->last_segment,
        glob_suboverlay_src_probe->last_segment.format, GST_BUFFER_PTS (buf));
    buf_end = buf_start + GST_BUFFER_DURATION (buf);

    if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION) {
      if (glob_pipeline_restarted == TRUE) {
        gboolean has_subs;

        if (glob_duration > 0 && buf_end > glob_duration) {
          /* Done according to the duration previously found by the
           * discoverer */
          next_test (test);
        }

        has_subs = frame_contains_subtitles (buf);
        if (GST_CLOCK_TIME_IS_VALID (glob_last_subtitled_frame)) {
          if (has_subs == FALSE) {
            /* A subtitle run just ended: write its span out. */
            GstBuffer *nbuf = gst_buffer_new ();

            GST_BUFFER_PTS (nbuf) = glob_last_subtitled_frame;
            GST_BUFFER_DURATION (nbuf) = buf_end - glob_last_subtitled_frame;
            media_descriptor_writer_add_frame (glob_writer, pad, nbuf);

            glob_last_subtitled_frame = GST_CLOCK_TIME_NONE;
            gst_buffer_unref (nbuf);
          }
        } else if (has_subs) {
          /* A subtitle run just started: remember where. */
          glob_last_subtitled_frame = buf_start;
        }
      }

      goto done;
    }

    /* We played enough... next test */
    if (GST_CLOCK_TIME_IS_VALID (glob_first_subtitle_ts) &&
        buf_start >=
        glob_first_subtitle_ts + glob_playback_duration * GST_SECOND) {
      next_test (test);
    }

    switch (glob_in_progress) {
      case TEST_NONE:
      {
        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", FALSE,
              "Got a buffer before the first segment");
        }

        /* First buffer seen: start the testing dance. */
        next_test (test);
      }
        /* fall through */
      default:
        break;
    }

    if (glob_subtitled_frames != NULL) {
      GstClockTime sub_start, sub_end;

      /* Only the head of the (sorted) list of expected subtitle spans
       * is checked against the current buffer. */
      next_sub = GST_BUFFER (glob_subtitled_frames->data);

      sub_start = GST_BUFFER_PTS (next_sub);
      sub_end = GST_BUFFER_DURATION_IS_VALID (next_sub) ?
          GST_BUFFER_DURATION (next_sub) + sub_start : -1;

      if (buf_start >= sub_start && buf_end < sub_end) {
        /* Buffer lies entirely inside the subtitle window: it must
         * contain rendered subtitles. */
        if (frame_contains_subtitles (buf) == TRUE) {
          glob_sub_render_found = TRUE;
          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              TRUE, NULL);
        } else {
          gchar *msg = g_strdup_printf ("Subtitle start %" GST_TIME_FORMAT
              " end %" GST_TIME_FORMAT " received buffer with no sub start %"
              GST_TIME_FORMAT " end %" GST_TIME_FORMAT,
              GST_TIME_ARGS (sub_start), GST_TIME_ARGS (sub_end),
              GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_end));

          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              FALSE, msg);
          glob_wrong_rendered_buf = TRUE;

          g_free (msg);
        }
      } else if (buf_end > sub_end) {
        /* We got a buffer that is after the subtitle we were waiting for
         * remove that buffer as not waiting for it anymore */
        gst_buffer_unref (next_sub);

        glob_subtitled_frames = g_list_remove (glob_subtitled_frames,
            next_sub);
      }
    }
  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        /* Track the latest segment; it is the stream-time reference. */
        gst_event_copy_segment (event,
            &glob_suboverlay_src_probe->last_segment);

        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", TRUE,
              NULL);
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
        }

        if (glob_suboverlay_src_probe->waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        /* NOTE(review): this branch looks unreachable --
         * waiting_first_segment was already cleared above; confirm. */
        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          /* Make sure that a new segment has been received for each stream */
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
          glob_suboverlay_src_probe->waiting_segment = FALSE;
        }

        glob_suboverlay_src_probe->waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  return TRUE;
}
/* Pipeline Callbacks */
/* Pad probe on the subtitle renderer sink pad: records the stream-time
 * spans of incoming subtitle buffers (into glob_subtitled_frames) so
 * the video-side probe can later verify they were rendered.  Falls
 * back to generating or skipping the media descriptor when buffers
 * carry no duration.  Data always passes (returns TRUE). */
static gboolean
renderer_probe_cb (InsanityGstTest * ptest, GstPad * pad,
    GstMiniObject * object, gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  if (GST_IS_BUFFER (object)) {
    gint64 stime_ts;
    GstBuffer *buf = GST_BUFFER (object), *nbuf;

    if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION)
      goto done;

    if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)) == FALSE &&
        glob_parser == NULL) {
      gboolean generate_media_desc;

      insanity_test_get_boolean_argument (test, "create-media-descriptor",
          (gboolean *) & generate_media_desc);

      /* We generate the XML file if needed and allowed by user */
      if (generate_media_desc)
        generate_xml_media_descriptor (test);
      else
        insanity_test_done (test);
    } else if (glob_parser == NULL) {
      /* Avoid using xml descriptor when not needed */
      stime_ts =
          gst_segment_to_stream_time (&glob_renderer_sink_probe->last_segment,
          glob_renderer_sink_probe->last_segment.format, GST_BUFFER_PTS (buf));

      if (GST_CLOCK_TIME_IS_VALID (glob_first_subtitle_ts) == FALSE)
        glob_first_subtitle_ts = stime_ts;

      /* Store the subtitle span (PTS in stream time) sorted by time. */
      nbuf = gst_buffer_new ();
      GST_BUFFER_PTS (nbuf) = stime_ts;
      GST_BUFFER_DURATION (nbuf) = GST_BUFFER_DURATION (buf);

      glob_subtitled_frames = g_list_insert_sorted (glob_subtitled_frames,
          nbuf, (GCompareFunc) sort_subtitle_bufs);
    }
  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        /* We do not care about event during subtitle generation */
        if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION)
          goto done;

        gst_event_copy_segment (event,
            &glob_renderer_sink_probe->last_segment);

        if (glob_renderer_sink_probe->waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        if (glob_renderer_sink_probe->waiting_first_segment == TRUE) {
          /* Make sure that a new segment has been received for each stream */
          glob_renderer_sink_probe->waiting_first_segment = FALSE;
          glob_renderer_sink_probe->waiting_segment = FALSE;
        }

        glob_renderer_sink_probe->waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  return TRUE;
}
/* Chain function for the tarkin decoder (GStreamer 0.8): feeds the
 * incoming Ogg data through the sync/stream layers, decodes tarkin
 * packets and pushes raw RGB frames on the source pad.
 *
 * FIX: both GST_ELEMENT_ERROR calls used the misspelled error code
 * NEGOTATION; corrected to NEGOTIATION, matching the core error enum
 * and the sibling gst_tarkinenc_chain. */
static void
gst_tarkindec_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  TarkinDec *tarkindec;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  tarkindec = GST_TARKINDEC (gst_pad_get_parent (pad));

  if (!tarkindec->setup) {
    /* Decoder never configured: drop buffers, forward events. */
    GST_ELEMENT_ERROR (tarkindec, CORE, NEGOTIATION, (NULL),
        ("decoder not initialized (input is not tarkin?)"));
    if (GST_IS_BUFFER (buf))
      gst_buffer_unref (buf);
    else
      gst_pad_event_default (pad, GST_EVENT (buf));
    return;
  }

  if (GST_IS_EVENT (buf)) {
    switch (GST_EVENT_TYPE (buf)) {
      case GST_EVENT_EOS:
      default:
        gst_pad_event_default (pad, GST_EVENT (buf));
        break;
    }
  } else {
    gchar *data;
    gulong size;
    gchar *buffer;
    guchar *rgb;
    TarkinTime date;
    TarkinVideoLayerDesc *layer;

    /* data to decode */
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    /* Hand the raw bytes to the Ogg sync layer. */
    buffer = ogg_sync_buffer (&tarkindec->oy, size);
    memcpy (buffer, data, size);
    ogg_sync_wrote (&tarkindec->oy, size);

    if (ogg_sync_pageout (&tarkindec->oy, &tarkindec->og)) {
      ogg_stream_pagein (&tarkindec->os, &tarkindec->og);

      while (ogg_stream_packetout (&tarkindec->os, &tarkindec->op)) {
        if (tarkindec->op.e_o_s)
          break;

        if (tarkindec->nheader < 3) {   /* 3 first packets to headerin */
          tarkin_synthesis_headerin (&tarkindec->ti, &tarkindec->tc,
              &tarkindec->op);
          if (tarkindec->nheader == 2) {
            tarkin_synthesis_init (tarkindec->tarkin_stream, &tarkindec->ti);
          }
          tarkindec->nheader++;
        } else {
          tarkin_synthesis_packetin (tarkindec->tarkin_stream, &tarkindec->op);
          while (tarkin_synthesis_frameout (tarkindec->tarkin_stream, &rgb, 0,
                  &date) == 0) {
            GstBuffer *outbuf;

            layer = &tarkindec->tarkin_stream->layer->desc;

            /* Negotiate source caps lazily, once the layer geometry is
             * known from the first decoded frame. */
            if (!GST_PAD_CAPS (tarkindec->srcpad)) {
              if (gst_pad_try_set_caps (tarkindec->srcpad,
                      GST_CAPS_NEW ("tarkin_raw",
                          "video/x-raw-rgb",
                          "bpp", GST_PROPS_INT (24),
                          "depth", GST_PROPS_INT (24),
                          "endianness", GST_PROPS_INT (G_BYTE_ORDER),
                          "red_mask", GST_PROPS_INT (0xff0000),
                          "green_mask", GST_PROPS_INT (0xff00),
                          "blue_mask", GST_PROPS_INT (0xff),
                          "width", GST_PROPS_INT (layer->width),
                          "height", GST_PROPS_INT (layer->height),
                          "framerate", GST_PROPS_FLOAT (0.)     /* FIXME!!! */
                      )) <= 0) {
                GST_ELEMENT_ERROR (tarkindec, CORE, NEGOTIATION, (NULL),
                    ("could not output format"));
                gst_buffer_unref (buf);
                return;
              }
            }

            /* Wrap the decoder-owned RGB data without copying; DONTFREE
             * because the decoder reclaims it via freeframe below. */
            outbuf = gst_buffer_new ();
            GST_BUFFER_DATA (outbuf) = rgb;
            GST_BUFFER_SIZE (outbuf) = layer->width * layer->height * 3;
            GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_DONTFREE);
            gst_pad_push (tarkindec->srcpad, GST_DATA (outbuf));

            tarkin_synthesis_freeframe (tarkindec->tarkin_stream, rgb);
          }
        }
      }
    }
    gst_buffer_unref (buf);
  }
}
/* Chain function for videodrop (GStreamer 0.8): converts the input
 * frame rate to to_fps by dropping or duplicating frames based on the
 * incoming buffer timestamps.  time_adjust tracks the last discont. */
static void
gst_videodrop_chain (GstPad * pad, GstData * data)
{
  GstVideodrop *videodrop = GST_VIDEODROP (gst_pad_get_parent (pad));
  GstBuffer *buf;

  if (GST_IS_EVENT (data)) {
    GstEvent *event = GST_EVENT (data);

    if (GST_EVENT_TYPE (event) == GST_EVENT_DISCONTINUOUS) {
      /* since we rely on timestamps of the source, we need to handle
       * changes in time carefully. */
      gint64 time;

      if (gst_event_discont_get_value (event, GST_FORMAT_TIME, &time)) {
        /* Reset counters and restart the output clock at `time'. */
        videodrop->total = videodrop->pass = 0;
        videodrop->time_adjust = time;
      } else {
        GST_ELEMENT_ERROR (videodrop, STREAM, TOO_LAZY, (NULL),
            ("Received discont, but no time information"));
        gst_event_unref (event);
        return;
      }
      /* FIXME: increase timestamp / speed */
    }

    gst_pad_event_default (pad, event);
    return;
  }

  buf = GST_BUFFER (data);
  videodrop->total++;
  GST_DEBUG ("Received buffer at %u:%02u:%02u:%09u, fps=%lf, pass=%"
      G_GUINT64_FORMAT " of " G_GUINT64_FORMAT ", speed=%lf",
      (guint) (GST_BUFFER_TIMESTAMP (buf) / (GST_SECOND * 60 * 60)),
      (guint) ((GST_BUFFER_TIMESTAMP (buf) / (GST_SECOND * 60)) % 60),
      (guint) ((GST_BUFFER_TIMESTAMP (buf) / GST_SECOND) % 60),
      (guint) (GST_BUFFER_TIMESTAMP (buf) % GST_SECOND),
      videodrop->to_fps, videodrop->total, videodrop->pass, videodrop->speed);

  /* Emit as many output frames as the target rate requires for the
   * stream time covered so far (0 emissions drops this input frame,
   * >1 duplicates it). */
  while (((GST_BUFFER_TIMESTAMP (buf) - videodrop->time_adjust) /
          videodrop->speed * videodrop->to_fps / GST_SECOND) >=
      videodrop->pass) {
    /* since we write to the struct (time/duration), we need a new struct,
     * but we don't want to copy around data - a subbuffer is the easiest
     * way to accomplish that... */
    GstBuffer *copy = gst_buffer_create_sub (buf, 0, GST_BUFFER_SIZE (buf));

    /* adjust timestamp/duration and push forward */
    GST_BUFFER_TIMESTAMP (copy) = (videodrop->time_adjust / videodrop->speed) +
        GST_SECOND * videodrop->pass / videodrop->to_fps;
    GST_BUFFER_DURATION (copy) = GST_SECOND / videodrop->to_fps;
    GST_DEBUG ("Sending out buffer from out %u:%02u:%02u:%09u",
        (guint) (GST_BUFFER_TIMESTAMP (copy) / (GST_SECOND * 60 * 60)),
        (guint) ((GST_BUFFER_TIMESTAMP (copy) / (GST_SECOND * 60)) % 60),
        (guint) ((GST_BUFFER_TIMESTAMP (copy) / GST_SECOND) % 60),
        (guint) (GST_BUFFER_TIMESTAMP (copy) % GST_SECOND));
    gst_pad_push (videodrop->srcpad, GST_DATA (copy));

    videodrop->pass++;
  }

  gst_buffer_unref (buf);
}
/* Split one interleaved audio buffer into per-channel buffers, pushing
 * one buffer on each source pad.  Takes ownership of @buf (always
 * unrefs it).  Returns GST_FLOW_NOT_LINKED when no pad accepted data,
 * otherwise the first non-recoverable flow return from a push. */
static GstFlowReturn
gst_deinterleave_process (GstDeinterleave * self, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint channels = GST_AUDIO_INFO_CHANNELS (&self->audio_info);
  guint pads_pushed = 0, buffers_allocated = 0;
  /* Whole frames in the input; WIDTH is in bits, hence the /8. */
  guint nframes =
      gst_buffer_get_size (buf) / channels /
      (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
  guint bufsize = nframes * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
  guint i;
  GList *srcs;
  GstBuffer **buffers_out = g_new0 (GstBuffer *, channels);
  guint8 *in, *out;
  GstMapInfo read_info;

  gst_buffer_map (buf, &read_info, GST_MAP_READ);

  /* Send any pending events to all src pads */
  GST_OBJECT_LOCK (self);
  if (self->pending_events) {
    GList *events;
    GstEvent *event;

    GST_DEBUG_OBJECT (self, "Sending pending events to all src pads");

    for (events = self->pending_events; events != NULL; events = events->next) {
      event = GST_EVENT (events->data);

      /* Each pad gets its own ref; drop the cached one afterwards. */
      for (srcs = self->srcpads; srcs != NULL; srcs = srcs->next)
        gst_pad_push_event (GST_PAD (srcs->data), gst_event_ref (event));
      gst_event_unref (event);
    }

    g_list_free (self->pending_events);
    self->pending_events = NULL;
  }
  GST_OBJECT_UNLOCK (self);

  /* Allocate buffers */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    buffers_out[i] = gst_buffer_new_allocate (NULL, bufsize, NULL);

    /* Make sure we got a correct buffer.  The only other case we allow
     * here is an unlinked pad */
    if (!buffers_out[i])
      goto alloc_buffer_failed;
    else if (buffers_out[i] && gst_buffer_get_size (buffers_out[i]) != bufsize)
      goto alloc_buffer_bad_size;

    if (buffers_out[i]) {
      /* Carry timestamps/flags over from the input buffer. */
      gst_buffer_copy_into (buffers_out[i], buf, GST_BUFFER_COPY_METADATA, 0,
          -1);
      buffers_allocated++;
    }
  }

  /* Return NOT_LINKED if no pad was linked */
  if (!buffers_allocated) {
    GST_WARNING_OBJECT (self,
        "Couldn't allocate any buffers because no pad was linked");
    ret = GST_FLOW_NOT_LINKED;
    goto done;
  }

  /* deinterleave */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    GstPad *pad = (GstPad *) srcs->data;
    GstMapInfo write_info;

    /* Channel i starts i sample-widths into the interleaved input. */
    in = (guint8 *) read_info.data;
    in += i * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
    if (buffers_out[i]) {
      gst_buffer_map (buffers_out[i], &write_info, GST_MAP_WRITE);
      out = (guint8 *) write_info.data;
      self->func (out, in, channels, nframes);
      gst_buffer_unmap (buffers_out[i], &write_info);

      /* Ownership of buffers_out[i] transfers to the pad. */
      ret = gst_pad_push (pad, buffers_out[i]);
      buffers_out[i] = NULL;
      if (ret == GST_FLOW_OK)
        pads_pushed++;
      else if (ret == GST_FLOW_NOT_LINKED)
        /* A single unlinked pad is not fatal. */
        ret = GST_FLOW_OK;
      else
        goto push_failed;
    }
  }

  /* Return NOT_LINKED if no pad was linked */
  if (!pads_pushed)
    ret = GST_FLOW_NOT_LINKED;

done:
  gst_buffer_unmap (buf, &read_info);
  gst_buffer_unref (buf);
  g_free (buffers_out);
  return ret;

alloc_buffer_failed:
  {
    /* NOTE(review): ret is still GST_FLOW_OK at this point, so both the
     * log message and the return value do not reflect the allocation
     * failure -- confirm intended. */
    GST_WARNING ("gst_pad_alloc_buffer() returned %s", gst_flow_get_name (ret));
    goto clean_buffers;
  }
alloc_buffer_bad_size:
  {
    GST_WARNING ("called alloc_buffer(), but didn't get requested bytes");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto clean_buffers;
  }
push_failed:
  {
    GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret));
    goto clean_buffers;
  }
clean_buffers:
  {
    gst_buffer_unmap (buf, &read_info);
    for (i = 0; i < channels; i++) {
      if (buffers_out[i])
        gst_buffer_unref (buffers_out[i]);
    }
    gst_buffer_unref (buf);
    g_free (buffers_out);
    return ret;
  }
}
/* Streaming task for the DeckLink source: waits for the capture
 * callback to hand over a video frame (and optionally an audio
 * packet), wraps them in GstBuffers, timestamps them from the frame /
 * sample counters and pushes them on the video and audio pads.  On a
 * non-OK combined flow it pauses the task and pushes EOS. */
static void
gst_decklink_src_task (void *priv)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
  GstBuffer *buffer;
  GstBuffer *audio_buffer;
  IDeckLinkVideoInputFrame *video_frame;
  IDeckLinkAudioInputPacket *audio_frame;
  void *data;
  gsize data_size;
  int n_samples;
  GstFlowReturn video_flow, audio_flow, flow;
  const GstDecklinkMode *mode;
  gboolean discont = FALSE;

  GST_DEBUG_OBJECT (decklinksrc, "task");

  /* Block until the capture callback delivers a frame or we are told
   * to stop; take ownership of the frames under the lock. */
  g_mutex_lock (&decklinksrc->mutex);
  while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
    g_cond_wait (&decklinksrc->cond, &decklinksrc->mutex);
  }
  video_frame = decklinksrc->video_frame;
  audio_frame = decklinksrc->audio_frame;
  decklinksrc->video_frame = NULL;
  decklinksrc->audio_frame = NULL;
  g_mutex_unlock (&decklinksrc->mutex);

  if (decklinksrc->stop) {
    if (video_frame)
      video_frame->Release ();
    if (audio_frame)
      audio_frame->Release ();
    GST_DEBUG ("stopping task");
    return;
  }

  /* warning on dropped frames */
  /* FIXME: post QoS message */
  if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
    GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
        ("Dropped %d frame(s), for a total of %d frame(s)",
            decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
            decklinksrc->dropped_frames), (NULL));
    decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    /* FIXME: discont = TRUE; ? */
  }

  if (!decklinksrc->started) {
    gst_decklink_src_send_initial_events (decklinksrc);
    decklinksrc->started = TRUE;
  }

  /* Flush externally queued events to both pads before pushing data. */
  if (g_atomic_int_get (&decklinksrc->have_events)) {
    GList *l;

    GST_OBJECT_LOCK (decklinksrc);
    for (l = decklinksrc->pending_events; l != NULL; l = l->next) {
      GstEvent *event = GST_EVENT (l->data);

      GST_DEBUG_OBJECT (decklinksrc, "pushing %s event",
          GST_EVENT_TYPE_NAME (event));
      /* Extra ref for the video pad; the audio push consumes the
       * original reference. */
      gst_pad_push_event (decklinksrc->videosrcpad, gst_event_ref (event));
      gst_pad_push_event (decklinksrc->audiosrcpad, event);
      l->data = NULL;
    }
    g_list_free (decklinksrc->pending_events);
    decklinksrc->pending_events = NULL;
    g_atomic_int_set (&decklinksrc->have_events, FALSE);
    GST_OBJECT_UNLOCK (decklinksrc);
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  video_frame->GetBytes (&data);

  /* 2 bytes per pixel -- presumably 8-bit UYVY; TODO confirm against
   * the configured pixel format. */
  data_size = mode->width * mode->height * 2;

  if (decklinksrc->copy_data) {
    /* Copy into a plain buffer and release the SDK frame right away. */
    buffer = gst_buffer_new_and_alloc (data_size);

    gst_buffer_fill (buffer, 0, data, data_size);

    video_frame->Release ();
  } else {
    /* Zero-copy: wrap the SDK frame; video_frame_free releases it when
     * the buffer is freed. */
    VideoFrame *vf;

    vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

    buffer = gst_buffer_new_wrapped_full ((GstMemoryFlags) 0, data, data_size,
        0, data_size, vf, (GDestroyNotify) video_frame_free);

    vf->frame = video_frame;
    vf->input = decklinksrc->input;
    vf->input->AddRef ();
  }

  /* Timestamps are derived from the running frame counter. */
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
      mode->fps_d, mode->fps_n);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
      mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
  GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;      /* FIXME: +1? */
  /* FIXME: set video meta */

  if (decklinksrc->frame_num == 0)
    discont = TRUE;

  if (discont)
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);

  video_flow = gst_pad_push (decklinksrc->videosrcpad, buffer);

  if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
    /* 16-bit stereo at 48 kHz: 2 bytes * 2 channels per sample frame. */
    n_samples = audio_frame->GetSampleFrameCount ();
    audio_frame->GetBytes (&data);
    audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
    gst_buffer_fill (audio_buffer, 0, data, n_samples * 2 * 2);

    GST_BUFFER_TIMESTAMP (audio_buffer) =
        gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
        1, 48000);
    /* FIXME: should be next_timestamp - timestamp for perfect stream */
    GST_BUFFER_DURATION (audio_buffer) =
        gst_util_uint64_scale_int (n_samples * GST_SECOND, 1, 48000);
    GST_BUFFER_OFFSET (audio_buffer) = decklinksrc->num_audio_samples;
    GST_BUFFER_OFFSET_END (audio_buffer) =
        GST_BUFFER_OFFSET (audio_buffer) + n_samples;
    decklinksrc->num_audio_samples += n_samples;

    if (discont)
      GST_BUFFER_FLAG_SET (audio_buffer, GST_BUFFER_FLAG_DISCONT);
    else
      GST_BUFFER_FLAG_UNSET (audio_buffer, GST_BUFFER_FLAG_DISCONT);

    audio_flow = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
  } else {
    audio_flow = GST_FLOW_NOT_LINKED;
  }

  /* Combine the two flow returns: ignore NOT_LINKED on one side,
   * prefer FLUSHING, then fatal errors, then EOS. */
  if (audio_flow == GST_FLOW_NOT_LINKED)
    flow = video_flow;
  else if (video_flow == GST_FLOW_NOT_LINKED)
    flow = audio_flow;
  else if (video_flow == GST_FLOW_FLUSHING || audio_flow == GST_FLOW_FLUSHING)
    flow = GST_FLOW_FLUSHING;
  else if (video_flow < GST_FLOW_EOS)
    flow = video_flow;
  else if (audio_flow < GST_FLOW_EOS)
    flow = audio_flow;
  else if (video_flow == GST_FLOW_EOS || audio_flow == GST_FLOW_EOS)
    flow = GST_FLOW_EOS;
  else
    flow = video_flow;

  if (g_atomic_int_compare_and_exchange (&decklinksrc->pending_eos, TRUE,
          FALSE)) {
    GST_INFO_OBJECT (decklinksrc, "EOS pending");
    flow = GST_FLOW_EOS;
  }

  if (flow != GST_FLOW_OK)
    goto pause;

done:
  /* video_frame was either Released (copy path) or its ownership moved
   * into the pushed buffer; only the audio packet remains ours here. */
  if (audio_frame)
    audio_frame->Release ();

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (flow);
    GstEvent *event = NULL;

    GST_DEBUG_OBJECT (decklinksrc, "pausing task, reason %s", reason);
    gst_task_pause (decklinksrc->task);
    if (flow == GST_FLOW_EOS) {
      /* perform EOS logic (very crude, we don't even keep a GstSegment) */
      event = gst_event_new_eos ();
    } else if (flow == GST_FLOW_NOT_LINKED || flow < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first.
       * Also don't do this for FLUSHING because it happens
       * due to flushing and posting an error message because of
       * that is the wrong thing to do, e.g. when we're doing
       * a flushing seek. */
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, flow));
    }
    if (event != NULL) {
      GST_INFO_OBJECT (decklinksrc->videosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->videosrcpad, gst_event_ref (event));
      GST_INFO_OBJECT (decklinksrc->audiosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->audiosrcpad, event);
    }
    goto done;
  }
}
/* Pipeline Callbacks */ static gboolean probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object, gpointer userdata) { InsanityTest *test = INSANITY_TEST (ptest); global_last_probe = g_get_monotonic_time (); DECODER_TEST_LOCK (); if (GST_IS_BUFFER (object)) { GstBuffer *buf; GstClockTime ts; buf = GST_BUFFER (object); ts = GST_BUFFER_PTS (buf); /* First check clipping */ if (glob_testing_parser == FALSE && GST_CLOCK_TIME_IS_VALID (ts) && glob_waiting_segment == FALSE) { GstClockTime ts_end, cstart, cstop; /* Check if buffer is completely outside the segment */ ts_end = ts; if (GST_BUFFER_DURATION_IS_VALID (buf)) ts_end += GST_BUFFER_DURATION (buf); /* Check if buffer is completely outside the segment */ ts_end = ts; if (!gst_segment_clip (&glob_last_segment, glob_last_segment.format, ts, ts_end, &cstart, &cstop)) { char *msg = g_strdup_printf ("Got timestamp %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT ", outside configured segment (%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT "), method %s", GST_TIME_ARGS (ts), GST_TIME_ARGS (ts_end), GST_TIME_ARGS (glob_last_segment.start), GST_TIME_ARGS (glob_last_segment.stop), test_get_name (glob_in_progress)); insanity_test_validate_checklist_item (INSANITY_TEST (ptest), "segment-clipping", FALSE, msg); g_free (msg); glob_bad_segment_clipping = TRUE; } } switch (glob_in_progress) { case TEST_NONE: if (glob_waiting_first_segment == TRUE) insanity_test_validate_checklist_item (test, "first-segment", FALSE, "Got a buffer before the first segment"); /* Got the first buffer, starting testing dance */ next_test (test); break; case TEST_POSITION: test_position (test, buf); break; case TEST_FAST_FORWARD: case TEST_BACKWARD_PLAYBACK: case TEST_FAST_BACKWARD: { gint64 stime_ts; if (GST_CLOCK_TIME_IS_VALID (ts) == FALSE || glob_waiting_segment == TRUE) { break; } stime_ts = gst_segment_to_stream_time (&glob_last_segment, glob_last_segment.format, ts); if (GST_CLOCK_TIME_IS_VALID (glob_seek_first_buf_ts) == FALSE) { 
GstClockTime expected_ts = gst_segment_to_stream_time (&glob_last_segment, glob_last_segment.format, glob_seek_rate < 0 ? glob_seek_stop_ts : glob_seek_segment_seektime); GstClockTimeDiff diff = ABS (GST_CLOCK_DIFF (stime_ts, expected_ts)); if (diff > SEEK_THRESHOLD) { gchar *valmsg = g_strdup_printf ("Received buffer timestamp %" GST_TIME_FORMAT " Seeek wanted %" GST_TIME_FORMAT "", GST_TIME_ARGS (stime_ts), GST_TIME_ARGS (expected_ts)); validate_current_test (test, FALSE, valmsg); next_test (test); g_free (valmsg); } else glob_seek_first_buf_ts = stime_ts; } else { GstClockTimeDiff diff = GST_CLOCK_DIFF (stime_ts, glob_seek_first_buf_ts); if (diff < 0) diff = -diff; if (diff >= glob_playback_duration * GST_SECOND) { validate_current_test (test, TRUE, NULL); next_test (test); } } break; } default: break; } } else if (GST_IS_EVENT (object)) { GstEvent *event = GST_EVENT (object); guint seqnum = gst_event_get_seqnum (event); if (G_LIKELY (glob_seqnum_found == FALSE) && seqnum == glob_seqnum) glob_seqnum_found = TRUE; if (glob_seqnum_found == TRUE && seqnum != glob_seqnum) { gchar *message = g_strdup_printf ("Current seqnum %i != " "received %i", glob_seqnum, seqnum); insanity_test_validate_checklist_item (test, "seqnum-management", FALSE, message); glob_wrong_seqnum = TRUE; g_free (message); } switch (GST_EVENT_TYPE (event)) { case GST_EVENT_SEGMENT: { gst_event_copy_segment (event, &glob_last_segment); if (glob_waiting_segment == FALSE) /* Cache the segment as it will be our reference but don't look * further */ goto done; glob_last_segment_start_time = glob_last_segment.start; if (glob_waiting_first_segment == TRUE) { insanity_test_validate_checklist_item (test, "first-segment", TRUE, NULL); glob_waiting_first_segment = FALSE; } else if (glob_in_progress >= TEST_FAST_FORWARD && glob_in_progress <= TEST_FAST_BACKWARD) { GstClockTimeDiff diff; gboolean valid_stop = TRUE; GstClockTimeDiff wdiff, rdiff; rdiff = ABS (GST_CLOCK_DIFF (glob_last_segment.stop, 
glob_last_segment.start)) * ABS (glob_last_segment.rate * glob_last_segment.applied_rate); wdiff = ABS (GST_CLOCK_DIFF (glob_seek_stop_ts, glob_seek_segment_seektime)); diff = GST_CLOCK_DIFF (glob_last_segment.position, glob_seek_segment_seektime); if (diff < 0) diff = -diff; /* Now compare with the expected segment */ if ((glob_last_segment.rate * glob_last_segment.applied_rate) == glob_seek_rate && diff <= SEEK_THRESHOLD && valid_stop) { glob_seek_got_segment = TRUE; } else { GstClockTime stopdiff = ABS (GST_CLOCK_DIFF (rdiff, wdiff)); gchar *validate_msg = g_strdup_printf ("Wrong segment received, Rate %f expected " "%f, start time diff %" GST_TIME_FORMAT " stop diff %" GST_TIME_FORMAT, (glob_last_segment.rate * glob_last_segment.applied_rate), glob_seek_rate, GST_TIME_ARGS (diff), GST_TIME_ARGS (stopdiff)); validate_current_test (test, FALSE, validate_msg); next_test (test); g_free (validate_msg); } } glob_waiting_segment = FALSE; break; } default: break; } } done: DECODER_TEST_UNLOCK (); return TRUE; }
static gboolean gst_video_flip_src_event (GstBaseTransform * trans, GstEvent * event) { GstVideoFlip *vf = GST_VIDEO_FLIP (trans); gdouble new_x, new_y, x, y; GstStructure *structure; GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event)); switch (GST_EVENT_TYPE (event)) { case GST_EVENT_NAVIGATION: event = GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event))); structure = (GstStructure *) gst_event_get_structure (event); if (gst_structure_get_double (structure, "pointer_x", &x) && gst_structure_get_double (structure, "pointer_y", &y)) { GST_DEBUG_OBJECT (vf, "converting %fx%f", x, y); switch (vf->method) { case GST_VIDEO_FLIP_METHOD_90R: new_x = y; new_y = vf->to_width - x; break; case GST_VIDEO_FLIP_METHOD_90L: new_x = vf->to_height - y; new_y = x; break; case GST_VIDEO_FLIP_METHOD_OTHER: new_x = vf->to_height - y; new_y = vf->to_width - x; break; case GST_VIDEO_FLIP_METHOD_TRANS: new_x = y; new_y = x; break; case GST_VIDEO_FLIP_METHOD_180: new_x = vf->to_width - x; new_y = vf->to_height - y; break; case GST_VIDEO_FLIP_METHOD_HORIZ: new_x = vf->to_width - x; new_y = y; break; case GST_VIDEO_FLIP_METHOD_VERT: new_x = x; new_y = vf->to_height - y; break; default: new_x = x; new_y = y; break; } GST_DEBUG_OBJECT (vf, "to %fx%f", new_x, new_y); gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x, "pointer_y", G_TYPE_DOUBLE, new_y, NULL); } break; default: break; } return TRUE; }
/* Chain function for the Xing MP3 encoder sink pad.
 *
 * On EOS it latches at_end_of_stream and pushes an EOS event downstream
 * after the current data (if any) has been handled.  For buffers it
 * optionally emits the VBR Xing header once, then copies the incoming
 * samples into the encoder's internal staging buffer and feeds whole
 * frames to the Xing encoder, pushing each encoded frame downstream.
 *
 * Ownership: consumes `buf` (buffer or event) on every path.
 * NOTE(review): gst_pad_get_parent() returns a reference that is never
 * released here — confirm against this codebase's 0.x conventions. */
static GstFlowReturn
xing_mp3_encoder_chain(GstPad *pad, GstBuffer *buf)
{
  XingMp3Encoder *encoder;
  GstFlowReturn ret = GST_FLOW_OK;

  g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail(GST_IS_PAD(pad), GST_FLOW_ERROR);
  g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

  encoder = XING_MP3_ENCODER(gst_pad_get_parent(pad));

  if(GST_IS_EVENT(buf)) {
    GstEvent *event = GST_EVENT(buf);

    switch(GST_EVENT_TYPE(event)) {
      case GST_EVENT_EOS:
        /* Remember EOS; the matching downstream EOS push happens at the
         * bottom of this function. */
        encoder->at_end_of_stream = TRUE;
        gst_event_unref(event);
        break;
      default:
        gst_pad_event_default(pad, event);
        return ret;
    }
  } else {
    guchar *data;
    gulong size;
    gulong i, j;
    float **buffer;
    gint buf_size;
    gint buf_pos;

    if(!encoder->is_initialized) {
      gst_buffer_unref(buf);
      GST_ELEMENT_ERROR(encoder, CORE, NEGOTIATION, (NULL),
          ("Encoder not initialized"));
      return GST_FLOW_UNEXPECTED;
    }

    /* Emit the (VBR-only) Xing header exactly once, before any audio. */
    if(!encoder->header_sent) {
      E_CONTROL control;
      MPEG_HEAD head;
      guchar output_buffer[OUTPUT_BUFFER_SIZE];
      gint buf_len;

      if(!encoder->use_cbr) {
        GstFlowReturn push_ret;

        hx_mp3enc_l3_info_ec(encoder->xing_encoder, &control);
        hx_mp3enc_l3_info_head(encoder->xing_encoder, &head);

        buf_len = XingHeader(control.samprate, head.mode, control.cr_bit,
            control.original,
            VBR_SCALE_FLAG /* FRAMES_FLAG | BYTES_FLAG | TOC_FLAG */,
            0, 0, control.vbr_flag ? control.vbr_mnr : -1, NULL,
            output_buffer, 0, 0, 0);

        if((push_ret = xing_mp3_encoder_push_buffer(encoder, output_buffer,
                buf_len)) != GST_FLOW_OK) {
          gst_buffer_unref(buf);
          return push_ret;
        }
      }
      encoder->header_sent = TRUE;
    }

    data = (guchar *)GST_BUFFER_DATA(buf);
    buf_size = GST_BUFFER_SIZE(buf);
    buf_pos = 0;

    /* Transfer incoming data to internal buffer.
     * TODO: Use a ring buffer, avoid memmove () */
    while(buf_pos < buf_size) {
      /* Copy as much as fits into the staging buffer this iteration. */
      gint gulp = MIN(buf_size - buf_pos,
          INPUT_BUFFER_SIZE - encoder->input_buffer_pos);

      memcpy(encoder->input_buffer + encoder->input_buffer_pos,
          data + buf_pos, gulp);
      /* 2 bytes per sample per channel (16-bit PCM assumed — confirm). */
      encoder->samples_in += gulp / (2 * encoder->channels);
      encoder->input_buffer_pos += gulp;
      buf_pos += gulp;

      /* Pass data on to encoder */
      while(encoder->input_buffer_pos >= encoder->bytes_per_frame) {
        guchar output_buffer[OUTPUT_BUFFER_SIZE];
        IN_OUT x;

        x = hx_mp3enc_mp3_encode_frame(encoder->xing_encoder,
            encoder->input_buffer, output_buffer);
        /* No progress on either side means the encoder needs more data. */
        if(x.in_bytes == 0 && x.out_bytes == 0) {
          break;
        }

        /* Shift the unconsumed tail to the front of the staging buffer. */
        memmove(encoder->input_buffer, encoder->input_buffer + x.in_bytes,
            encoder->input_buffer_pos - x.in_bytes);
        encoder->input_buffer_pos -= x.in_bytes;

        /* Accept output from encoder and pass it on.
         * TODO: Do this less often and save CPU */
        if(x.out_bytes > 0) {
          GstFlowReturn push_ret;

          if((push_ret = xing_mp3_encoder_push_buffer(encoder, output_buffer,
                  x.out_bytes)) != GST_FLOW_OK) {
            gst_buffer_unref(buf);
            return push_ret;
          }
        }
      }
    }

    gst_buffer_unref(buf);
  }

  if(encoder->at_end_of_stream) {
    gst_pad_push_event(encoder->srcpad, gst_event_new_eos());
  }

  return ret;
}
/* Split one interleaved audio buffer into per-channel buffers and push one
 * buffer on each linked source pad.
 *
 * Flow: flush pending serialized events to all src pads, allocate one
 * downstream buffer per pad (tolerating unlinked pads), run the
 * deinterleave function per channel, then push.  Returns the first fatal
 * flow error, GST_FLOW_NOT_LINKED if nothing could be allocated/pushed,
 * otherwise GST_FLOW_OK.
 *
 * Ownership: consumes `buf` on every path (done/clean_buffers both unref). */
static GstFlowReturn
gst_deinterleave_process (GstDeinterleave * self, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint channels = self->channels;
  guint pads_pushed = 0, buffers_allocated = 0;
  /* width is in bits; width/8 is the per-sample byte size. */
  guint nframes = GST_BUFFER_SIZE (buf) / channels / (self->width / 8);
  guint bufsize = nframes * (self->width / 8);
  guint i;
  GList *srcs;
  GstBuffer **buffers_out = g_new0 (GstBuffer *, channels);
  guint8 *in, *out;

  /* Send any pending events to all src pads */
  GST_OBJECT_LOCK (self);
  if (self->pending_events) {
    GList *events;
    GstEvent *event;

    GST_DEBUG_OBJECT (self, "Sending pending events to all src pads");

    for (events = self->pending_events; events != NULL; events = events->next) {
      event = GST_EVENT (events->data);

      /* Each pad gets its own ref; drop the list's ref afterwards. */
      for (srcs = self->srcpads; srcs != NULL; srcs = srcs->next)
        gst_pad_push_event (GST_PAD (srcs->data), gst_event_ref (event));
      gst_event_unref (event);
    }

    g_list_free (self->pending_events);
    self->pending_events = NULL;
  }
  GST_OBJECT_UNLOCK (self);

  /* Allocate buffers */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    GstPad *pad = (GstPad *) srcs->data;

    buffers_out[i] = NULL;
    ret = gst_pad_alloc_buffer (pad, GST_BUFFER_OFFSET_NONE, bufsize,
        GST_PAD_CAPS (pad), &buffers_out[i]);

    /* Make sure we got a correct buffer. The only other case we allow
     * here is an unlinked pad (buffers_out[i] stays NULL for it). */
    if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
      goto alloc_buffer_failed;
    else if (buffers_out[i] && GST_BUFFER_SIZE (buffers_out[i]) != bufsize)
      goto alloc_buffer_bad_size;
    else if (buffers_out[i] &&
        !gst_caps_is_equal (GST_BUFFER_CAPS (buffers_out[i]),
            GST_PAD_CAPS (pad)))
      goto invalid_caps;

    if (buffers_out[i]) {
      /* Carry over timestamps/flags from the interleaved input buffer. */
      gst_buffer_copy_metadata (buffers_out[i], buf,
          GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS);
      buffers_allocated++;
    }
  }

  /* Return NOT_LINKED if no pad was linked */
  if (!buffers_allocated) {
    GST_WARNING_OBJECT (self,
        "Couldn't allocate any buffers because no pad was linked");
    ret = GST_FLOW_NOT_LINKED;
    goto done;
  }

  /* deinterleave */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    GstPad *pad = (GstPad *) srcs->data;

    /* Channel i starts i samples into the interleaved data. */
    in = (guint8 *) GST_BUFFER_DATA (buf);
    in += i * (self->width / 8);
    if (buffers_out[i]) {
      out = (guint8 *) GST_BUFFER_DATA (buffers_out[i]);

      self->func (out, in, channels, nframes);
      ret = gst_pad_push (pad, buffers_out[i]);
      /* Ownership passed to push; forget it so cleanup won't double-unref. */
      buffers_out[i] = NULL;

      if (ret == GST_FLOW_OK)
        pads_pushed++;
      else if (ret == GST_FLOW_NOT_LINKED)
        ret = GST_FLOW_OK;      /* one unlinked pad is not fatal */
      else
        goto push_failed;
    }
  }

  /* Return NOT_LINKED if no pad was pushed */
  if (!pads_pushed)
    ret = GST_FLOW_NOT_LINKED;

done:
  gst_buffer_unref (buf);
  g_free (buffers_out);

  return ret;

alloc_buffer_failed:
  {
    GST_WARNING ("gst_pad_alloc_buffer() returned %s", gst_flow_get_name (ret));
    goto clean_buffers;
  }
alloc_buffer_bad_size:
  {
    GST_WARNING ("called alloc_buffer(), but didn't get requested bytes");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto clean_buffers;
  }
invalid_caps:
  {
    GST_WARNING ("called alloc_buffer(), but didn't get requested caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto clean_buffers;
  }
push_failed:
  {
    GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret));
    goto clean_buffers;
  }
clean_buffers:
  {
    /* Error path: release any buffers that were allocated but not pushed. */
    for (i = 0; i < channels; i++) {
      if (buffers_out[i])
        gst_buffer_unref (buffers_out[i]);
    }
    gst_buffer_unref (buf);
    g_free (buffers_out);
    return ret;
  }
}
/* Pad probe for the HLS seek test: checks that, after each programmed
 * seek, the SEGMENT event and the first buffer arrive close to the
 * expected target (within SEEK_THRESHOLD), advances to the next seek
 * target, and finishes the test after the last one.
 * Always returns TRUE so data keeps flowing. */
static gboolean
probe (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);
  GstClockTimeDiff diff;

  HLS_TEST_LOCK ();

  if (GST_IS_BUFFER (object)) {
    /* Only inspect buffers while a seek target is armed and its segment
     * has already been observed. */
    if (GST_CLOCK_TIME_IS_VALID (glob_target) &&
        GST_CLOCK_TIME_IS_VALID (glob_segment)) {
      diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (object), glob_target);
      if (diff < 0)
        diff = -diff;

      LOG ("Got buffer start %" GST_TIME_FORMAT ", expected around %"
          GST_TIME_FORMAT ", off by %" GST_TIME_FORMAT ", method %d\n",
          GST_TIME_ARGS (GST_BUFFER_PTS (object)),
          GST_TIME_ARGS (glob_target), GST_TIME_ARGS (diff), glob_state);

      if (diff < SEEK_THRESHOLD) {
        /* Resetting global segment for next seek */
        glob_segment = GST_CLOCK_TIME_NONE;
        seek_targets[glob_seek_nb].buffer_received = TRUE;
        glob_seek_nb++;
        if (glob_is_seekable && glob_seek_nb < G_N_ELEMENTS (seek_targets)) {
          /* Program next seek */
          glob_wait_time = hls_test_get_wait_time (INSANITY_TEST (ptest));
          glob_timer_id = g_timeout_add (250,
              (GSourceFunc) & wait_and_end_step, INSANITY_TEST (ptest));
        } else {
          /* Done with the test */
          insanity_test_done (test);
        }
      }
    }
  } else {
    GstEvent *event = GST_EVENT (object);

    if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
      GstSegment segment;

      gst_event_copy_segment (event, &segment);

      /* Not waiting for a segment, ignoring */
      if (!GST_CLOCK_TIME_IS_VALID (glob_target)) {
        LOG ("Got segment starting at %" GST_TIME_FORMAT
            ", but we are not waiting for segment\n",
            GST_TIME_ARGS (segment.start));
        goto ignore_segment;
      }

      /* Checking the segment has good timing */
      diff = GST_CLOCK_DIFF (segment.start, glob_target);
      if (diff < 0)
        diff = -diff;

      if (diff > SEEK_THRESHOLD) {
        /* Off-target segment: logged but not recorded as received.
         * NOTE(review): this branch prints the same message as the
         * success branch below — presumably intentional logging of both
         * outcomes; confirm. */
        LOG ("Got segment start %" GST_TIME_FORMAT ", expected around %"
            GST_TIME_FORMAT ", off by %" GST_TIME_FORMAT ", method %d\n",
            GST_TIME_ARGS (segment.start), GST_TIME_ARGS (glob_target),
            GST_TIME_ARGS (diff), glob_state);
      } else {
        LOG ("Got segment start %" GST_TIME_FORMAT ", expected around %"
            GST_TIME_FORMAT ", off by %" GST_TIME_FORMAT ", method %d\n",
            GST_TIME_ARGS (segment.start), GST_TIME_ARGS (glob_target),
            GST_TIME_ARGS (diff), glob_state);
        seek_targets[glob_seek_nb].segment_received = TRUE;
        /* Arm the buffer check above for this segment. */
        glob_segment = segment.start;
      }
    }
  }

ignore_segment:
  HLS_TEST_UNLOCK ();

  return TRUE;
}
static void dxr3audiosink_chain_ac3 (GstPad * pad, GstData * _data) { Dxr3AudioSink *sink; gint bytes_written = 0; GstBuffer *buf; g_return_if_fail (pad != NULL); g_return_if_fail (GST_IS_PAD (pad)); g_return_if_fail (_data != NULL); sink = DXR3AUDIOSINK (gst_pad_get_parent (pad)); if (GST_IS_EVENT (_data)) { dxr3audiosink_handle_event (pad, GST_EVENT (_data)); return; } buf = GST_BUFFER (_data); if (sink->mode != DXR3AUDIOSINK_MODE_AC3) { /* Switch to AC3 mode. */ dxr3audiosink_set_mode_ac3 (sink); } if (GST_OBJECT_FLAG_IS_SET (sink, DXR3AUDIOSINK_OPEN)) { int event; if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { /* We have a new scr value. */ /* fprintf (stderr, "------ Audio Time %.04f\n", */ /* (double) GST_BUFFER_TIMESTAMP (buf) / GST_SECOND); */ sink->scr = GSTTIME_TO_MPEGTIME (GST_BUFFER_TIMESTAMP (buf)); } /* Push the new data into the padder. */ ac3p_push_data (sink->padder, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf)); /* Parse the data. */ event = ac3p_parse (sink->padder); while (event != AC3P_EVENT_PUSH) { switch (event) { case AC3P_EVENT_FRAME: /* We have a new frame: */ /* Update the system reference clock (SCR) in the card. */ { unsigned in, out, odelay; unsigned diff; ioctl (sink->control_fd, EM8300_IOCTL_SCR_GET, &out); ioctl (sink->audio_fd, SNDCTL_DSP_GETODELAY, &odelay); /* 192000 bytes/sec */ in = MPEGTIME_TO_DXRTIME (sink->scr - (odelay * 90) / 192); diff = in > out ? in - out : out - in; if (diff > 1800) { dxr3audiosink_set_scr (sink, in); } } /* Update our SCR value. */ sink->scr += TIME_FOR_BYTES (ac3p_frame_size (sink->padder)); /* Write the frame to the sound device. */ bytes_written = write (sink->audio_fd, ac3p_frame (sink->padder), AC3P_IEC_FRAME_SIZE); if (bytes_written < AC3P_IEC_FRAME_SIZE) { fprintf (stderr, "dxr3audiosink: Warning: %d bytes should be " "written, only %d bytes written\n", AC3P_IEC_FRAME_SIZE, bytes_written); } break; } event = ac3p_parse (sink->padder); } } gst_buffer_unref (buf); }
static GstFlowReturn gst_wavpack_parse_push_buffer (GstWavpackParse * wvparse, GstBuffer * buf, WavpackHeader * header) { wvparse->current_offset += header->ckSize + 8; wvparse->segment.last_stop = header->block_index; if (wvparse->need_newsegment) { if (gst_wavpack_parse_send_newsegment (wvparse, FALSE)) wvparse->need_newsegment = FALSE; } /* send any queued events */ if (wvparse->queued_events) { GList *l; for (l = wvparse->queued_events; l != NULL; l = l->next) { gst_pad_push_event (wvparse->srcpad, GST_EVENT (l->data)); } g_list_free (wvparse->queued_events); wvparse->queued_events = NULL; } if (wvparse->pending_buffer == NULL) { wvparse->pending_buffer = buf; wvparse->pending_offset = header->block_index; } else if (wvparse->pending_offset == header->block_index) { wvparse->pending_buffer = gst_buffer_join (wvparse->pending_buffer, buf); } else { GST_ERROR ("Got incomplete block, dropping"); gst_buffer_unref (wvparse->pending_buffer); wvparse->pending_buffer = buf; wvparse->pending_offset = header->block_index; } if (!(header->flags & FINAL_BLOCK)) return GST_FLOW_OK; buf = wvparse->pending_buffer; wvparse->pending_buffer = NULL; GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (header->block_index, GST_SECOND, wvparse->samplerate); GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (header->block_samples, GST_SECOND, wvparse->samplerate); GST_BUFFER_OFFSET (buf) = header->block_index; GST_BUFFER_OFFSET_END (buf) = header->block_index + header->block_samples; if (wvparse->discont || wvparse->next_block_index != header->block_index) { GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT); wvparse->discont = FALSE; } wvparse->next_block_index = header->block_index + header->block_samples; gst_buffer_set_caps (buf, GST_PAD_CAPS (wvparse->srcpad)); GST_LOG_OBJECT (wvparse, "Pushing buffer with time %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf))); return gst_pad_push (wvparse->srcpad, buf); }
static void gst_rfc2250_enc_loop (GstElement * element) { GstRFC2250Enc *enc = GST_RFC2250_ENC (element); GstData *data; guint id; gboolean mpeg2; data = gst_mpeg_packetize_read (enc->packetize); id = GST_MPEG_PACKETIZE_ID (enc->packetize); mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (enc->packetize); if (GST_IS_BUFFER (data)) { GstBuffer *buffer = GST_BUFFER (data); GST_DEBUG ("rfc2250enc: have chunk 0x%02X", id); switch (id) { case SEQUENCE_START_CODE: gst_rfc2250_enc_new_buffer (enc); enc->flags |= ENC_HAVE_SEQ; break; case GOP_START_CODE: if (enc->flags & ENC_HAVE_DATA) { gst_rfc2250_enc_new_buffer (enc); } enc->flags |= ENC_HAVE_GOP; break; case PICTURE_START_CODE: if (enc->flags & ENC_HAVE_DATA) { gst_rfc2250_enc_new_buffer (enc); } enc->flags |= ENC_HAVE_PIC; break; case EXT_START_CODE: case USER_START_CODE: case SEQUENCE_ERROR_START_CODE: case SEQUENCE_END_START_CODE: break; default: /* do this here because of the long range */ if (id >= SLICE_MIN_START_CODE && id <= SLICE_MAX_START_CODE) { enc->flags |= ENC_HAVE_DATA; gst_rfc2250_enc_add_slice (enc, buffer); buffer = NULL; break; } break; } if (buffer) { gst_buffer_merge (enc->packet, buffer); enc->remaining -= GST_BUFFER_SIZE (buffer); gst_buffer_unref (buffer); } } else { if (enc->packet) { gst_pad_push (enc->srcpad, GST_DATA (enc->packet)); enc->packet = NULL; enc->flags = 0; enc->remaining = enc->MTU; } gst_pad_event_default (enc->sinkpad, GST_EVENT (data)); } }
static void dxr3audiosink_chain_pcm (GstPad * pad, GstData * _data) { Dxr3AudioSink *sink; gint bytes_written = 0; GstBuffer *buf; g_return_if_fail (pad != NULL); g_return_if_fail (GST_IS_PAD (pad)); g_return_if_fail (_data != NULL); sink = DXR3AUDIOSINK (gst_pad_get_parent (pad)); if (GST_IS_EVENT (_data)) { dxr3audiosink_handle_event (pad, GST_EVENT (_data)); return; } buf = GST_BUFFER (_data); if (sink->mode != DXR3AUDIOSINK_MODE_PCM) { /* Switch to PCM mode. */ dxr3audiosink_set_mode_pcm (sink); } if (GST_OBJECT_FLAG_IS_SET (sink, DXR3AUDIOSINK_OPEN)) { if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { /* We have a new scr value. */ sink->scr = GSTTIME_TO_MPEGTIME (GST_BUFFER_TIMESTAMP (buf)); } /* Update the system reference clock (SCR) in the card. */ { unsigned in, out, odelay; unsigned diff; ioctl (sink->control_fd, EM8300_IOCTL_SCR_GET, &out); ioctl (sink->audio_fd, SNDCTL_DSP_GETODELAY, &odelay); in = MPEGTIME_TO_DXRTIME (sink->scr - (odelay * 90) / 192); diff = in > out ? in - out : out - in; if (diff > 1800) { dxr3audiosink_set_scr (sink, in); } } /* Update our SCR value. */ sink->scr += (unsigned) (GST_BUFFER_SIZE (buf) * (90000.0 / ((float) sink->rate * 4))); /* Write the buffer to the sound device. */ bytes_written = write (sink->audio_fd, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf)); if (bytes_written < GST_BUFFER_SIZE (buf)) { fprintf (stderr, "dxr3audiosink: Warning: %d bytes should be " "written, only %d bytes written\n", GST_BUFFER_SIZE (buf), bytes_written); } } gst_buffer_unref (buf); }