Example #1
static void
handle_set_media_message (SnraClient * client, GstStructure * s)
{
  const gchar *protocol, *path;
  int port;
  GstClockTime base_time;
  gint64 tmp;
  gchar *uri;
  gboolean paused;

  protocol = gst_structure_get_string (s, "resource-protocol");
  path = gst_structure_get_string (s, "resource-path");

  if (protocol == NULL || path == NULL)
    return;                     /* Invalid message */

  if (!snra_json_structure_get_int (s, "resource-port", &port))
    return;                     /* Invalid message */

  if (!snra_json_structure_get_int64 (s, "base-time", &tmp))
    return;                     /* Invalid message */

  if (!snra_json_structure_get_boolean (s, "paused", &paused))
    return;                     /* Invalid message */

  base_time = (GstClockTime) (tmp);

  if (client->player == NULL) {
    construct_player (client);
    if (client->player == NULL)
      return;
  } else {
    gst_element_set_state (client->player, GST_STATE_NULL);
  }

  uri =
      g_strdup_printf ("%s://%s:%d%s", protocol, client->connected_server, port,
      path);
  g_print ("Playing URI %s base_time %" GST_TIME_FORMAT "\n", uri,
      GST_TIME_ARGS (base_time));
  g_object_set (client->player, "uri", uri, NULL);
  g_free (uri);

  gst_element_set_start_time (client->player, GST_CLOCK_TIME_NONE);
  gst_element_set_base_time (client->player, base_time);
  gst_pipeline_use_clock (GST_PIPELINE (client->player), client->net_clock);

  if (client->enabled) {
    if (paused)
      client->state = GST_STATE_PAUSED;
    else
      client->state = GST_STATE_PLAYING;
  } else {
    client->state = DISABLED_STATE;
  }

  gst_element_set_state (client->player, client->state);
}
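
A note on the clock: handle_set_media_message() assumes client->net_clock has already been slaved to the server's clock. A minimal sketch of how that clock could be created with gst_net_client_clock_new() follows; the helper name and the point at which the clock port is learned are assumptions, not part of the original client.

/* Hedged sketch: one way to obtain the net_clock used above. Assumes the
 * server announced its clock port earlier (e.g. in an enrol message). */
static void
setup_net_clock (SnraClient * client, int clock_port)
{
  if (client->net_clock == NULL) {
    /* Slave a local clock to the server's GstNetTimeProvider */
    client->net_clock = gst_net_client_clock_new ("snra-net-clock",
        client->connected_server, clock_port, 0);
  }
}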
Example #2
static void
set_media (AurClient * client)
{
  if (client->player == NULL) {
    construct_player (client);
    if (client->player == NULL)
      return;
  }

  gst_element_set_state (client->player, GST_STATE_READY);

  g_print ("Setting media URI %s base_time %" GST_TIME_FORMAT " position %"
      GST_TIME_FORMAT " paused %i\n", client->uri,
      GST_TIME_ARGS (client->base_time), GST_TIME_ARGS (client->position),
      client->paused);
  g_object_set (client->player, "uri", client->uri, NULL);

  gst_element_set_start_time (client->player, GST_CLOCK_TIME_NONE);
  gst_pipeline_use_clock (GST_PIPELINE (client->player), client->net_clock);

  /* Do the preroll */
  gst_element_set_state (client->player, GST_STATE_PAUSED);
  gst_element_get_state (client->player, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* Compensate preroll time if playing */
  if (!client->paused) {
    GstClockTime now = gst_clock_get_time (client->net_clock);
    if (now > (client->base_time + client->position))
      client->position = now - client->base_time;
  }

  /* If position is off by more than 0.5 sec, seek to that position
   * (otherwise, just let the player skip) */
  if (client->position > GST_SECOND/2) {
    /* FIXME Query duration, so we don't seek after EOS */
    if (!gst_element_seek_simple (client->player, GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, client->position)) {
      g_warning ("Initial seek failed, player will go faster instead");
      client->position = 0;
    }
  }

  /* Set base time considering seek position after seek */
  gst_element_set_base_time (client->player,
      client->base_time + client->position);

  /* Before we start playing, ensure we have selected the right audio track */
  set_language (client);

  if (!client->paused)
    gst_element_set_state (client->player, GST_STATE_PLAYING);
}
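
The preroll compensation above follows GStreamer's clock model, where running_time = clock_time - base_time. A standalone sketch of the same arithmetic, with illustrative names:

/* Sketch of the clock arithmetic used in set_media() above. After seeking
 * to *position, the pipeline base time must be base_time + *position so
 * that running_time = clock_time - (base_time + *position) lines up with
 * the first sample played after the seek. */
static GstClockTime
compute_pipeline_base_time (GstClock * net_clock, GstClockTime base_time,
    GstClockTime * position, gboolean paused)
{
  if (!paused) {
    GstClockTime now = gst_clock_get_time (net_clock);

    /* If the shared start point is already in the past, skip ahead */
    if (now > base_time + *position)
      *position = now - base_time;
  }

  return base_time + *position;
}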
Example #3
int main(int argc, char *argv[]) {
  GMainLoop *main_loop;
  GstElement *playbin;
  GstClock *client_clock;
  GstNetTimeProvider *prov_clock;
  guint16 clock_port;
  GstClockTime base_time;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  prov_clock = create_net_clock (&clock_port);
  client_clock = gst_net_client_clock_new (NULL, "127.0.0.1", clock_port, 0);

  /* Wait 0.5 seconds for the clock to stabilise */
  g_usleep (G_USEC_PER_SEC / 2);
  base_time = share_base_time (clock_port, prov_clock);

  /* Create the elements */
  playbin = gst_element_factory_make ("playbin", "playbin");
  g_object_set (playbin, "uri", "file:///home/luisbg/samples/big_buck_bunny_1080p_h264.mov", NULL);

  gst_pipeline_use_clock (GST_PIPELINE (playbin), client_clock);
  gst_element_set_base_time (playbin, base_time);
  gst_element_set_start_time (playbin, GST_CLOCK_TIME_NONE);
  gst_pipeline_set_latency (GST_PIPELINE (playbin), GST_SECOND / 2);

  gst_element_set_state (playbin, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run */
  main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);
  return 0;
}
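
create_net_clock() and share_base_time() are not defined in this snippet. The sketch below shows one plausible minimal implementation of each; a real share_base_time() would also have to transmit the port and base time to the other participants, which is omitted here.

/* Hedged sketch of the missing helpers. create_net_clock() exposes the
 * system clock over UDP and reports the port it bound to. */
static GstNetTimeProvider *
create_net_clock (guint16 * port)
{
  GstClock *clock = gst_system_clock_obtain ();
  /* Port 0 lets the provider pick a free port; query it back afterwards */
  GstNetTimeProvider *prov = gst_net_time_provider_new (clock, NULL, 0);
  gint bound_port = 0;

  g_object_get (prov, "port", &bound_port, NULL);
  *port = (guint16) bound_port;
  gst_object_unref (clock);
  return prov;
}

static GstClockTime
share_base_time (guint16 clock_port, GstNetTimeProvider * prov)
{
  GstClock *clock;
  GstClockTime base;

  (void) clock_port;            /* a real implementation would announce this */

  g_object_get (prov, "clock", &clock, NULL);
  base = gst_clock_get_time (clock);
  gst_object_unref (clock);
  return base;
}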
Example #4
static void owr_media_renderer_init(OwrMediaRenderer *renderer)
{
    OwrMediaRendererPrivate *priv;
    GstBus *bus;
    GSource *bus_source;
    gchar *bin_name;

    renderer->priv = priv = OWR_MEDIA_RENDERER_GET_PRIVATE(renderer);

    priv->media_type = DEFAULT_MEDIA_TYPE;
    priv->source = DEFAULT_SOURCE;
    priv->disabled = DEFAULT_DISABLED;

    priv->message_origin_bus_set = owr_message_origin_bus_set_new();

    bin_name = g_strdup_printf("media-renderer-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->pipeline = gst_pipeline_new(bin_name);
    gst_pipeline_use_clock(GST_PIPELINE(priv->pipeline), gst_system_clock_obtain());
    gst_element_set_base_time(priv->pipeline, _owr_get_base_time());
    gst_element_set_start_time(priv->pipeline, GST_CLOCK_TIME_NONE);
    g_free(bin_name);

#ifdef OWR_DEBUG
    g_signal_connect(priv->pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

    priv->sink = NULL;
    priv->src = NULL;

    bus = gst_pipeline_get_bus(GST_PIPELINE(priv->pipeline));
    bus_source = gst_bus_create_watch(bus);
    g_source_set_callback(bus_source, (GSourceFunc) bus_call, priv->pipeline, NULL);
    g_source_attach(bus_source, _owr_get_main_context());
    g_source_unref(bus_source);

    g_mutex_init(&priv->media_renderer_lock);
}
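
The bus_call() callback attached above is not shown in this example. Below is a minimal sketch of a compatible bus watch, assuming (as in the g_source_set_callback() call above) that the pipeline is passed as user data; the real callback may do considerably more.

/* Hedged sketch of a GstBusFunc matching the watch installed above */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    GstElement *pipeline = GST_ELEMENT(user_data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
        GError *error = NULL;
        gchar *debug = NULL;

        gst_message_parse_error(msg, &error, &debug);
        g_warning("Error from %s: %s", GST_OBJECT_NAME(msg->src), error->message);
        g_error_free(error);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_EOS:
        gst_element_set_state(pipeline, GST_STATE_NULL);
        break;
    default:
        break;
    }

    return TRUE; /* keep the watch installed */
}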
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *                                                             +------------+
 *                                                         /---+ inter*sink |
 * +--------+    +--------+   +------------+   +-----+    /    +------------+
 * | source +----+ scale? +---+ capsfilter +---+ tee +---/
 * +--------+    +--------+   +------------+   +-----+   \
 *                                                        \    +------------+
 *                                                         \---+ inter*sink |
 *                                                             +------------+
 *
 * For each newly requested pad a new inter*sink is added to the tee.
 * Note that this is a completely independent pipeline, and the complete
 * pipeline is only created once for a specific media source.
 *
 * Then for each newly requested pad another bin with an inter*src is
 * created, which then becomes part of the transport agent pipeline.
 * Its ghost pad is what we return here.
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_element = NULL;
    GstElement *source_pipeline;
    GHashTable *event_data;
    GValue *value;
#if defined(__linux__) && !defined(__ANDROID__)
    gchar *tmp;
#endif

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    /* only create the source bin for this media source once */
    if ((source_pipeline = _owr_media_source_get_source_bin(media_source)))
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
    else {
        OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
        OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
        GstElement *source, *source_process = NULL, *capsfilter = NULL, *tee;
        GstPad *sinkpad, *source_pad;
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstBus *bus;
        GSource *bus_source;

        event_data = _owr_value_table_new();
        value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());

        g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_pipeline = gst_pipeline_new(bin_name);
        gst_pipeline_use_clock(GST_PIPELINE(source_pipeline), gst_system_clock_obtain());
        gst_element_set_base_time(source_pipeline, _owr_get_base_time());
        gst_element_set_start_time(source_pipeline, GST_CLOCK_TIME_NONE);
        g_free(bin_name);
        bin_name = NULL;

#ifdef OWR_DEBUG
        g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

        bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline));
        bus_source = gst_bus_create_watch(bus);
        g_source_set_callback(bus_source, (GSourceFunc) bus_call, media_source, NULL);
        g_source_attach(bus_source, _owr_get_main_context());
        g_source_unref(bus_source);

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
            {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
/*
    Default values for buffer-time and latency-time on android are 200ms and 20ms.
    The minimum latency-time that can be used on Android is 20ms, and using
    a 40ms buffer-time with a 20ms latency-time causes crackling audio.
    So let's just stick with the defaults.
*/
#if !defined(__ANDROID__)
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#endif
                if (priv->device_index > -1) {
#ifdef __APPLE__
                    g_object_set(source, "device", priv->device_index, NULL);
#elif defined(__linux__) && !defined(__ANDROID__)
                    tmp = g_strdup_printf("%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            break;
            }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            GstPad *srcpad;
            GstCaps *device_caps;

            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                if (priv->device_index > -1) {
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                    g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                    g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                    tmp = g_strdup_printf("/dev/video%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
                break;
            case OWR_SOURCE_TYPE_TEST: {
                GstElement *src, *time;
                GstPad *srcpad;

                source = gst_bin_new("video-source");

                CREATE_ELEMENT(src, "videotestsrc", "videotestsrc");
                g_object_set(src, "is-live", TRUE, NULL);
                gst_bin_add(GST_BIN(source), src);

                time = gst_element_factory_make("timeoverlay", "timeoverlay");
                if (time) {
                    g_object_set(time, "font-desc", "Sans 60", NULL);
                    gst_bin_add(GST_BIN(source), time);
                    gst_element_link(src, time);
                    srcpad = gst_element_get_static_pad(time, "src");
                } else
                    srcpad = gst_element_get_static_pad(src, "src");

                gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad));
                gst_object_unref(srcpad);

                break;
            }
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* First try to see if we can just get the format we want directly */

            source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
            gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#else
            _owr_gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#endif
            /* Now see what the device can really produce */
            srcpad = gst_element_get_static_pad(source, "src");
            gst_element_set_state(source, GST_STATE_READY);
            device_caps = gst_pad_query_caps(srcpad, source_caps);

            if (gst_caps_is_empty(device_caps)) {
                /* Let's see if it works when we drop format constraints (which can be dealt with downstream) */
                GstCaps *tmp = source_caps;
                source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
                gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#else
                _owr_gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#endif
                gst_caps_unref(tmp);

                gst_caps_unref(device_caps);
                device_caps = gst_pad_query_caps(srcpad, source_caps);

                if (gst_caps_is_empty(device_caps)) {
                    /* Accepting any format didn't work, we're going to hope that scaling fixes it */
                    CREATE_ELEMENT(source_process, "videoscale", "video-source-scale");
                    gst_bin_add(GST_BIN(source_pipeline), source_process);
                }
            }

            gst_caps_unref(device_caps);
            gst_object_unref(srcpad);

#if defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
            /* Force NV12 on iOS, else the source can negotiate BGRA.
             * ercolorspace can do NV12 -> BGRA and NV12 -> I420, which is
             * what is needed for Bowser */
            gst_caps_set_simple(source_caps, "format", G_TYPE_STRING, "NV12", NULL);
#endif

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_pipeline), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        source_pad = gst_element_get_static_pad(source, "src");
        g_signal_connect(source_pad, "notify::caps", G_CALLBACK(on_caps), media_source);
        gst_object_unref(source_pad);

        CREATE_ELEMENT(tee, "tee", "source-tee");
        g_object_set(tee, "allow-not-linked", TRUE, NULL);

        gst_bin_add_many(GST_BIN(source_pipeline), source, tee, NULL);

        /* Many sources don't like reconfiguration and it's pointless
         * here anyway right now. No need to reconfigure whenever something
         * is added to the tee or removed.
         * We will have to implement reconfiguration differently later by
         * selecting the best caps based on all consumers.
         */
        sinkpad = gst_element_get_static_pad(tee, "sink");
        gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, drop_reconfigure_event, NULL, NULL);
        gst_object_unref(sinkpad);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            if (source_process) {
                LINK_ELEMENTS(source_process, capsfilter);
                LINK_ELEMENTS(source, source_process);
            } else
                LINK_ELEMENTS(source, capsfilter);
        } else if (source_process) {
            LINK_ELEMENTS(source_process, tee);
            LINK_ELEMENTS(source, source_process);
        } else
            LINK_ELEMENTS(source, tee);

        gst_element_sync_state_with_parent(tee);
        if (capsfilter)
            gst_element_sync_state_with_parent(capsfilter);
        if (source_process)
            gst_element_sync_state_with_parent(source_process);
        gst_element_sync_state_with_parent(source);

        _owr_media_source_set_source_bin(media_source, source_pipeline);
        _owr_media_source_set_source_tee(media_source, tee);
        if (gst_element_set_state(source_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
            GST_ERROR("Failed to set local source pipeline %s to playing", GST_OBJECT_NAME(source_pipeline));
            /* FIXME: We should handle this and not expose the source */
        }

        value = _owr_value_table_add(event_data, "end_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());
        OWR_POST_EVENT(media_source, LOCAL_SOURCE_STARTED, event_data);

        g_signal_connect(tee, "pad-removed", G_CALLBACK(tee_pad_removed_cb), media_source);
    }
    gst_object_unref(source_pipeline);

    source_element = OWR_MEDIA_SOURCE_CLASS(owr_local_media_source_parent_class)->request_source(media_source, caps);

done:
    return source_element;
}
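
The consumer-side bin from the second diagram in the comment above (inter*src, converters, ghost pad) is built by the parent class's request_source(), which is not shown here. Below is a simplified sketch of such a bin for the video case, pairing an intervideosrc with its intervideosink through the shared "channel" property; the element choice and naming are illustrative only.

/* Hedged sketch of a consumer-side bin: intervideosrc -> queue ->
 * videoconvert, exposed through a ghost pad */
static GstElement *create_video_consumer_bin(const gchar *channel_name)
{
    GstElement *bin, *src, *queue, *convert;
    GstPad *srcpad;

    bin = gst_bin_new("video-consumer-bin");
    src = gst_element_factory_make("intervideosrc", NULL);
    queue = gst_element_factory_make("queue", NULL);
    convert = gst_element_factory_make("videoconvert", NULL);

    /* The channel name must match the corresponding intervideosink */
    g_object_set(src, "channel", channel_name, NULL);

    gst_bin_add_many(GST_BIN(bin), src, queue, convert, NULL);
    gst_element_link_many(src, queue, convert, NULL);

    srcpad = gst_element_get_static_pad(convert, "src");
    gst_element_add_pad(bin, gst_ghost_pad_new("src", srcpad));
    gst_object_unref(srcpad);

    return bin;
}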
Example #6
static BOOL tsmf_gstreamer_decodeEx(ITSMFDecoder* decoder, const BYTE *data, UINT32 data_size, UINT32 extensions,
									UINT64 start_time, UINT64 end_time, UINT64 duration)
{
	GstBuffer *gst_buf;
	TSMFGstreamerDecoder* mdecoder = (TSMFGstreamerDecoder *) decoder;
	UINT64 sample_time = tsmf_gstreamer_timestamp_ms_to_gst(start_time);
	BOOL useTimestamps = TRUE;

	if (!mdecoder)
	{
		WLog_ERR(TAG, "Decoder not initialized!");
		return FALSE;
	}

	/*
	 * This function is always called from a stream-specific thread.
	 * It should be alright to block here if necessary.
	 * We don't expect to block here often, since the pipeline should
	 * have more than enough buffering.
	 */
	DEBUG_TSMF("%s. Start:(%"PRIu64") End:(%"PRIu64") Duration:(%"PRIu64") Last Start:(%"PRIu64")",
			   get_type(mdecoder), start_time, end_time, duration,
			   mdecoder->last_sample_start_time);

	if (mdecoder->shutdown)
	{
		WLog_ERR(TAG, "decodeEx called on shutdown decoder");
		return TRUE;
	}

	if (mdecoder->gst_caps == NULL)
	{
		WLog_ERR(TAG, "tsmf_gstreamer_set_format not called or invalid format.");
		return FALSE;
	}

	if (!mdecoder->pipe)
		tsmf_gstreamer_pipeline_build(mdecoder);

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "failed to construct pipeline correctly. Unable to push buffer to source element.");
		return FALSE;
	}

	gst_buf = tsmf_get_buffer_from_data(data, data_size);

	if (gst_buf == NULL)
	{
		WLog_ERR(TAG, "tsmf_get_buffer_from_data(%p, %"PRIu32") failed.", (void*) data, data_size);
		return FALSE;
	}

	/* Relative timestamping will sometimes be set to 0,
	 * so we ignore these timestamps just to be safe (bit 8)
	 */
	if (extensions & 0x00000080)
	{
		DEBUG_TSMF("Ignoring the timestamps - relative - bit 8");
		useTimestamps = FALSE;
	}

	/* If no timestamps exist then we don't want to look at the timestamp values (bit 7) */
	if (extensions & 0x00000040)
	{
		DEBUG_TSMF("Ignoring the timestamps - none - bit 7");
		useTimestamps = FALSE;
	}

	/* If performing a seek */
	if (mdecoder->seeking)
	{
		mdecoder->seeking = FALSE;
		tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PAUSED);
		mdecoder->pipeline_start_time_valid = 0;	
	}

	if (mdecoder->pipeline_start_time_valid)
	{
		DEBUG_TSMF("%s start time %"PRIu64"", get_type(mdecoder), start_time);

		/* Adjust the condition for a seek to be based on start time only.
		 * WMV1 and WMV2 files in particular have bad end time and duration
		 * values, and there seem to be no real side effects of just using
		 * the start time instead.
		 */
		UINT64 minTime = mdecoder->last_sample_start_time - (UINT64) SEEK_TOLERANCE;
		UINT64 maxTime = mdecoder->last_sample_start_time + (UINT64) SEEK_TOLERANCE;

		/* Clamp minTime to 0 in case we are at the beginning of the stream */
		if (mdecoder->last_sample_start_time < (UINT64) SEEK_TOLERANCE)
			minTime = 0;    

		/* If the start_time is valid and different from the previous start time by more than the seek tolerance, then we have a seek condition */
		if (((start_time > maxTime) || (start_time < minTime)) && useTimestamps)
		{
			DEBUG_TSMF("tsmf_gstreamer_decodeEx: start_time=[%"PRIu64"] > last_sample_start_time=[%"PRIu64"] OR ", start_time, mdecoder->last_sample_start_time);
			DEBUG_TSMF("tsmf_gstreamer_decodeEx: start_time=[%"PRIu64"] < last_sample_start_time=[%"PRIu64"] with", start_time, mdecoder->last_sample_start_time);
			DEBUG_TSMF("tsmf_gstreamer_decodeEx: a tolerance of more than [%lu] from the last sample", SEEK_TOLERANCE);
			DEBUG_TSMF("tsmf_gstreamer_decodeEx: minTime=[%"PRIu64"] maxTime=[%"PRIu64"]", minTime, maxTime);

			mdecoder->seeking = TRUE;

			/* Since we can't make the GStreamer pipeline jump to the new start
			 * time after a seek, we just maintain an offset between real time
			 * and GStreamer time
			 */
			mdecoder->seek_offset = start_time;
		}
	}
	else
	{
		DEBUG_TSMF("%s start time %"PRIu64"", get_type(mdecoder), start_time);
		/* Always set base/start time to 0 and use the seek offset to translate
		 * real buffer times back to 0. This allows the video to be started from
		 * anywhere and seeks to be handled without rebuilding the pipeline,
		 * which is costly
		 */
		gst_element_set_base_time(mdecoder->pipe, tsmf_gstreamer_timestamp_ms_to_gst(0));
		gst_element_set_start_time(mdecoder->pipe, tsmf_gstreamer_timestamp_ms_to_gst(0));
		mdecoder->pipeline_start_time_valid = 1;

		/* Set the seek offset if buffer has valid timestamps. */
		if (useTimestamps)
			mdecoder->seek_offset = start_time;

		if (!gst_element_seek(mdecoder->pipe, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
						GST_SEEK_TYPE_SET, 0,
						GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE))
		{
			WLog_ERR(TAG, "seek failed");
		}
	}

#if GST_VERSION_MAJOR > 0
	if (useTimestamps)
		GST_BUFFER_PTS(gst_buf) = sample_time - tsmf_gstreamer_timestamp_ms_to_gst(mdecoder->seek_offset);
	else
		GST_BUFFER_PTS(gst_buf) = GST_CLOCK_TIME_NONE;
#else
	if (useTimestamps)
		GST_BUFFER_TIMESTAMP(gst_buf) = sample_time - tsmf_gstreamer_timestamp_ms_to_gst(mdecoder->seek_offset);
	else
		GST_BUFFER_TIMESTAMP(gst_buf) = GST_CLOCK_TIME_NONE;
#endif
	GST_BUFFER_DURATION(gst_buf) = GST_CLOCK_TIME_NONE;
	GST_BUFFER_OFFSET(gst_buf) = GST_BUFFER_OFFSET_NONE;
#if GST_VERSION_MAJOR > 0
#else
	gst_buffer_set_caps(gst_buf, mdecoder->gst_caps);
#endif
	gst_app_src_push_buffer(GST_APP_SRC(mdecoder->src), gst_buf);

	/* Should only update the last timestamps if the current ones are valid */
	if (useTimestamps)
	{
		mdecoder->last_sample_start_time = start_time;
		mdecoder->last_sample_end_time = end_time;
	}

	if (mdecoder->pipe && (GST_STATE(mdecoder->pipe) != GST_STATE_PLAYING))
	{
		DEBUG_TSMF("%s: state=%s", get_type(mdecoder), gst_element_state_get_name(GST_STATE(mdecoder->pipe)));	

		DEBUG_TSMF("%s Paused: %"PRIi32"   Shutdown: %i   Ready: %"PRIi32"", get_type(mdecoder), mdecoder->paused, mdecoder->shutdown, mdecoder->ready);
		if (!mdecoder->paused && !mdecoder->shutdown && mdecoder->ready)
			tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	}

	return TRUE;
}
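
The seek-detection branch in the middle of tsmf_gstreamer_decodeEx() can be read as a standalone predicate. A sketch, reusing the names and the SEEK_TOLERANCE constant from above:

/* Hedged sketch of the seek test used above: a sample triggers a seek when
 * its start time falls outside a SEEK_TOLERANCE window around the previous
 * sample's start time, and only when its timestamps are trustworthy. */
static BOOL is_seek(UINT64 start_time, UINT64 last_start_time, BOOL useTimestamps)
{
	UINT64 minTime, maxTime;

	if (!useTimestamps)
		return FALSE;

	/* Clamp at zero to avoid unsigned underflow near the stream start */
	minTime = (last_start_time < (UINT64) SEEK_TOLERANCE) ?
			0 : last_start_time - (UINT64) SEEK_TOLERANCE;
	maxTime = last_start_time + (UINT64) SEEK_TOLERANCE;

	return (start_time > maxTime) || (start_time < minTime);
}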