Exemplo n.º 1
0
EXPORT_C
#endif

/*
 * gst_mixer_record_toggled:
 * @mixer: mixer that emitted the change (must also be a GstElement)
 * @track: track whose record flag was toggled
 * @record: new record state
 *
 * Posts a "record-toggled" element message on the mixer's bus so that
 * applications watching the bus learn about the change.
 */
void
gst_mixer_record_toggled (GstMixer * mixer,
    GstMixerTrack * track, gboolean record)
{
  GstStructure *structure;
  GstMessage *message;

  g_return_if_fail (mixer != NULL);
  g_return_if_fail (GST_IS_ELEMENT (mixer));
  g_return_if_fail (track != NULL);

  /* Payload describing what changed. */
  structure = gst_structure_new (GST_MIXER_MESSAGE_NAME,
      "type", G_TYPE_STRING, "record-toggled",
      "track", GST_TYPE_MIXER_TRACK, track,
      "record", G_TYPE_BOOLEAN, record, NULL);

  message = gst_message_new_element (GST_OBJECT (mixer), structure);

  /* Posting fails when the element is not attached to any bus. */
  if (!gst_element_post_message (GST_ELEMENT (mixer), message)) {
    GST_WARNING ("This element has no bus, therefore no message sent!");
  }
}
Exemplo n.º 2
0
/*
 * Handles GST_MESSAGE_ERROR messages for a shmdata reader.
 *
 * Returns TRUE when the error originated from the reader's own source or
 * deserializer element and was handled here; FALSE otherwise (caller should
 * process the message itself).
 */
gboolean
shmdata_base_reader_process_error (shmdata_base_reader_t *reader,
                                   GstMessage *msg)
{
    if (NULL == reader)
    {
        g_warning ("%s: trying to process error of a NULL reader",
                   __FUNCTION__);
        return FALSE;
    }

    if (NULL == msg)
    {
        g_warning ("%s: trying to process error with a NULL msg",
                   __FUNCTION__);
        return FALSE;
    }

    switch (GST_MESSAGE_TYPE (msg))
    {
    case GST_MESSAGE_ERROR:
    {
        gchar *debug = NULL;
        GError *error = NULL;
        gst_message_parse_error (msg, &error, &debug);
        if (NULL != error && NULL != error->message)
            g_debug ("error: %s (%s)",
                     error->message,
                     GST_OBJECT_NAME (msg->src));
        else
            /* FIX: the "%s" conversion previously had no matching argument
             * (undefined behavior); supply __FUNCTION__ like the warnings
             * above. */
            g_debug ("%s: error with error parsing", __FUNCTION__);
        g_free (debug);
        if (g_strcmp0
                (GST_ELEMENT_NAME (reader->source_),
                 GST_OBJECT_NAME (msg->src)) == 0)
        {
            /* FIX: guard the dereference — error may be NULL when parsing
             * failed above. */
            if (NULL != error && error->code == GST_RESOURCE_ERROR_READ)
                shmdata_base_reader_detach (reader);
            g_clear_error (&error);
            return TRUE;
        }

        if (g_strcmp0
                (GST_ELEMENT_NAME (reader->deserializer_),
                 GST_OBJECT_NAME (msg->src)) == 0)
        {
            /* Pull the failing elements out of the bin (keeping a ref so
             * they survive removal) before scheduling recovery. */
            if (GST_IS_BIN (reader->bin_) && GST_IS_ELEMENT (reader->deserializer_))
            {
                gst_object_ref (reader->deserializer_);
                gst_bin_remove (GST_BIN (reader->bin_), reader->deserializer_);
            }

            if (GST_IS_BIN (reader->bin_) && GST_IS_ELEMENT (reader->source_))
            {
                gst_object_ref (reader->source_);
                gst_bin_remove (GST_BIN (reader->bin_), reader->source_);
            }
            /* Schedule recovery on the reader's own main context rather than
             * the default one (g_idle_add would use the default context). */
            GSource *source = g_idle_source_new ();
            g_source_set_priority (source, G_PRIORITY_DEFAULT_IDLE);
            g_source_set_callback (source,
                                   (GSourceFunc)shmdata_base_reader_recover_from_deserializer_error,
                                   (gpointer)reader,
                                   NULL);
            g_source_attach (source, reader->g_main_context_);
            g_source_unref (source);

            g_clear_error (&error);
            return TRUE;
        }
        g_clear_error (&error);
        break;
    }
    default:
        break;
    }
    return FALSE;
}
Exemplo n.º 3
0
/* Returns TRUE when @play exists and wraps a live GstElement, FALSE
 * otherwise (including for a NULL @play). */
gboolean isValid (gstPlay *play) {
	return (play != NULL && GST_IS_ELEMENT (play->element)) ? TRUE : FALSE;
}
Exemplo n.º 4
0
/*
 * Runs typefinding on @filename and prints the detected caps (or an error)
 * to stdout/stderr. If @filename is a directory, recurses into each entry.
 */
static void
typefind_file (const gchar * filename)
{
  GstStateChangeReturn sret;
  GstElement *pipeline;
  GstElement *source;
  GstElement *typefind;
  GstElement *fakesink;
  GstState state;
  GstCaps *caps = NULL;
  GDir *dir;

  /* Directory? Recurse into every entry and return. */
  if ((dir = g_dir_open (filename, 0, NULL))) {
    const gchar *entry;

    while ((entry = g_dir_read_name (dir))) {
      gchar *path;

      path = g_strconcat (filename, G_DIR_SEPARATOR_S, entry, NULL);
      typefind_file (path);
      g_free (path);
    }

    g_dir_close (dir);
    return;
  }

  pipeline = gst_pipeline_new ("pipeline");

  source = gst_element_factory_make ("filesrc", "source");
  g_assert (GST_IS_ELEMENT (source));
  typefind = gst_element_factory_make ("typefind", "typefind");
  g_assert (GST_IS_ELEMENT (typefind));
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  /* FIX: this assertion previously re-checked `typefind` (copy-paste bug),
   * so a missing fakesink element went undetected. */
  g_assert (GST_IS_ELEMENT (fakesink));

  gst_bin_add_many (GST_BIN (pipeline), source, typefind, fakesink, NULL);
  gst_element_link_many (source, typefind, fakesink, NULL);

  /* have_type_handler stores the detected caps into &caps. */
  g_signal_connect (G_OBJECT (typefind), "have-type",
      G_CALLBACK (have_type_handler), &caps);

  g_object_set (source, "location", filename, NULL);

  GST_DEBUG ("Starting typefinding for %s", filename);

  /* typefind will only commit to PAUSED if it actually finds a type;
   * otherwise the state change fails */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

  /* wait until state change either completes or fails */
  sret = gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL, -1);

  switch (sret) {
    case GST_STATE_CHANGE_FAILURE:{
      GstMessage *msg;
      GstBus *bus;
      GError *err = NULL;

      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
      gst_object_unref (bus);

      if (msg) {
        gst_message_parse_error (msg, &err, NULL);
        g_printerr ("%s - FAILED: %s\n", filename, err->message);
        g_error_free (err);
        gst_message_unref (msg);
      } else {
        g_printerr ("%s - FAILED: unknown error\n", filename);
      }
      break;
    }
    case GST_STATE_CHANGE_SUCCESS:{
      if (caps) {
        gchar *caps_str;

        caps_str = gst_caps_to_string (caps);
        g_print ("%s - %s\n", filename, caps_str);
        g_free (caps_str);
        gst_caps_unref (caps);
      } else {
        g_print ("%s - %s\n", filename, "No type found");
      }
      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
}
Exemplo n.º 5
0
/*
 * Idle/one-shot callback that configures camerabin (mode, preview caps,
 * metadata, output filename, GstPhotography options, zoom) and starts a
 * capture. Returns FALSE so it is not rescheduled.
 */
static gboolean
run_pipeline (gpointer user_data)
{
  GstCaps *preview_caps = NULL;
  gchar *filename_str = NULL;
  GstElement *video_source = NULL;
  const gchar *filename_suffix;

  g_object_set (camerabin, "mode", mode, NULL);

  if (preview_caps_name != NULL) {
    preview_caps = gst_caps_from_string (preview_caps_name);
    if (preview_caps) {
      g_object_set (camerabin, "preview-caps", preview_caps, NULL);
      GST_DEBUG ("Preview caps set");
      /* FIX: g_object_set takes its own reference to the boxed caps;
       * drop ours to avoid leaking one caps per capture. */
      gst_caps_unref (preview_caps);
    } else
      GST_DEBUG ("Preview caps set but could not create caps from string");
  }

  set_metadata (camerabin);

  /* Construct filename */
  if (mode == MODE_VIDEO)
    filename_suffix = ".mp4";
  else
    filename_suffix = ".jpg";
  filename_str =
      g_strdup_printf ("%s/test_%04u%s", filename->str, capture_count,
      filename_suffix);
  GST_DEBUG ("Setting filename: %s", filename_str);
  g_object_set (camerabin, "location", filename_str, NULL);
  g_free (filename_str);

  g_object_get (camerabin, "camera-src", &video_source, NULL);
  if (video_source) {
    if (GST_IS_ELEMENT (video_source) &&
        gst_element_implements_interface (video_source, GST_TYPE_PHOTOGRAPHY)) {
      /* Set GstPhotography interface options. If option not given as
         command-line parameter use default of the source element. */
      if (scene_mode != SCENE_MODE_NONE)
        g_object_set (video_source, "scene-mode", scene_mode, NULL);
      if (ev_compensation != EV_COMPENSATION_NONE)
        g_object_set (video_source, "ev-compensation", ev_compensation, NULL);
      if (aperture != APERTURE_NONE)
        g_object_set (video_source, "aperture", aperture, NULL);
      if (flash_mode != FLASH_MODE_NONE)
        g_object_set (video_source, "flash-mode", flash_mode, NULL);
      if (exposure != EXPOSURE_NONE)
        g_object_set (video_source, "exposure", exposure, NULL);
      if (iso_speed != ISO_SPEED_NONE)
        g_object_set (video_source, "iso-speed", iso_speed, NULL);
      if (wb_mode != WHITE_BALANCE_MODE_NONE)
        g_object_set (video_source, "white-balance-mode", wb_mode, NULL);
      if (color_mode != COLOR_TONE_MODE_NONE)
        g_object_set (video_source, "colour-tone-mode", color_mode, NULL);
    }
    g_object_unref (video_source);
  }
  g_object_set (camerabin, "zoom", zoom / 100.0f, NULL);

  capture_count++;
  g_timer_start (timer);
  g_signal_emit_by_name (camerabin, "start-capture", 0);

  /* For video, schedule the stop after the requested capture time. */
  if (mode == MODE_VIDEO) {
    g_timeout_add ((capture_time * 1000), (GSourceFunc) stop_capture, NULL);
  }

  return FALSE;
}
Exemplo n.º 6
0
/**
 * gst_gl_handle_context_query:
 * @element: a #GstElement
 * @query: a #GstQuery of type %GST_QUERY_CONTEXT
 * @display: (transfer none) (nullable): a #GstGLDisplay
 * @context: (transfer none) (nullable): a #GstGLContext
 * @other_context: (transfer none) (nullable): application provided #GstGLContext
 *
 * Returns: Whether the @query was successfully responded to from the passed
 *          @display, @context, and @other_context.
 */
gboolean
gst_gl_handle_context_query (GstElement * element, GstQuery * query,
    GstGLDisplay * display, GstGLContext * gl_context,
    GstGLContext * other_context)
{
  const gchar *context_type;
  GstContext *context, *old_context;

  g_return_val_if_fail (GST_IS_ELEMENT (element), FALSE);
  g_return_val_if_fail (GST_IS_QUERY (query), FALSE);
  g_return_val_if_fail (display == NULL || GST_IS_GL_DISPLAY (display), FALSE);
  g_return_val_if_fail (gl_context == NULL
      || GST_IS_GL_CONTEXT (gl_context), FALSE);
  g_return_val_if_fail (other_context == NULL
      || GST_IS_GL_CONTEXT (other_context), FALSE);

  GST_LOG_OBJECT (element, "handle context query %" GST_PTR_FORMAT, query);
  gst_query_parse_context_type (query, &context_type);

  if (display && g_strcmp0 (context_type, GST_GL_DISPLAY_CONTEXT_TYPE) == 0) {
    gst_query_parse_context (query, &old_context);

    if (old_context)
      context = gst_context_copy (old_context);
    else
      context = gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);

    gst_context_set_gl_display (context, display);
    gst_query_set_context (query, context);
    gst_context_unref (context);
    GST_DEBUG_OBJECT (element, "successfully set %" GST_PTR_FORMAT
        " on %" GST_PTR_FORMAT, display, query);

    return TRUE;
  }
#if GST_GL_HAVE_WINDOW_X11
  else if (display && g_strcmp0 (context_type, "gst.x11.display.handle") == 0) {
    GstStructure *s;

    gst_query_parse_context (query, &old_context);

    if (old_context)
      context = gst_context_copy (old_context);
    else
      context = gst_context_new ("gst.x11.display.handle", TRUE);

    if (gst_gl_display_get_handle_type (display) & GST_GL_DISPLAY_TYPE_X11) {
      Display *x11_display = (Display *) gst_gl_display_get_handle (display);

      if (x11_display) {
        s = gst_context_writable_structure (context);
        gst_structure_set (s, "display", G_TYPE_POINTER, x11_display, NULL);

        gst_query_set_context (query, context);
        gst_context_unref (context);

        GST_DEBUG_OBJECT (element, "successfully set x11 display %p (from %"
            GST_PTR_FORMAT ") on %" GST_PTR_FORMAT, x11_display, display,
            query);

        return TRUE;
      }
    }
    /* FIX: don't leak the context when no usable X11 handle is available */
    gst_context_unref (context);
  }
#endif
#if GST_GL_HAVE_WINDOW_WAYLAND
  else if (display
      && g_strcmp0 (context_type, "GstWaylandDisplayHandleContextType") == 0) {
    GstStructure *s;

    gst_query_parse_context (query, &old_context);

    if (old_context)
      context = gst_context_copy (old_context);
    else
      context = gst_context_new ("GstWaylandDisplayHandleContextType", TRUE);

    if (gst_gl_display_get_handle_type (display) & GST_GL_DISPLAY_TYPE_WAYLAND) {
      struct wl_display *wayland_display =
          (struct wl_display *) gst_gl_display_get_handle (display);

      if (wayland_display) {
        s = gst_context_writable_structure (context);
        gst_structure_set (s, "display", G_TYPE_POINTER, wayland_display, NULL);

        gst_query_set_context (query, context);
        gst_context_unref (context);

        GST_DEBUG_OBJECT (element, "successfully set wayland display %p (from %"
            GST_PTR_FORMAT ") on %" GST_PTR_FORMAT, wayland_display, display,
            query);

        return TRUE;
      }
    }
    /* FIX: don't leak the context when no usable wayland handle is available */
    gst_context_unref (context);
  }
#endif
  else if (other_context && g_strcmp0 (context_type, "gst.gl.app_context") == 0) {
    GstStructure *s;

    gst_query_parse_context (query, &old_context);

    if (old_context)
      context = gst_context_copy (old_context);
    else
      context = gst_context_new ("gst.gl.app_context", TRUE);

    s = gst_context_writable_structure (context);
    gst_structure_set (s, "context", GST_TYPE_GL_CONTEXT, other_context, NULL);
    gst_query_set_context (query, context);
    gst_context_unref (context);

    GST_DEBUG_OBJECT (element, "successfully set application GL context %"
        GST_PTR_FORMAT " on %" GST_PTR_FORMAT, other_context, query);

    return TRUE;
  } else if (gl_context
      && g_strcmp0 (context_type, "gst.gl.local_context") == 0) {
    GstStructure *s;

    gst_query_parse_context (query, &old_context);

    if (old_context)
      context = gst_context_copy (old_context);
    else
      context = gst_context_new ("gst.gl.local_context", TRUE);

    s = gst_context_writable_structure (context);
    gst_structure_set (s, "context", GST_TYPE_GL_CONTEXT, gl_context, NULL);
    gst_query_set_context (query, context);
    gst_context_unref (context);

    GST_DEBUG_OBJECT (element, "successfully set GL context %"
        GST_PTR_FORMAT " on %" GST_PTR_FORMAT, gl_context, query);

    return TRUE;
  }

  return FALSE;
}
Exemplo n.º 7
0
// Bus message handler for the recording pipeline: reacts to EOS, state
// changes, errors, missing-plugin/redirect element messages and stream
// status (to install an HTTP timeout on souphttpsrc).
void eServiceMP3Record::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	ePtr<iRecordableService> ptr = this;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	sourceName = gst_object_get_name(source);
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			eDebug("[eMP3ServiceRecord] gstBusCall eos event");
			// Stream end -> stop recording
			m_event((iRecordableService*)this, evGstRecordEnded);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			// Only pipeline-level state changes are interesting here.
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_recording_pipeline))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);

			if(old_state == new_state)
				break;

			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
			eDebug("[eMP3ServiceRecord] gstBusCall state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
			switch(transition)
			{
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					// Data is flowing again; the streaming-source watchdog
					// is no longer needed.
					if (m_streamingsrc_timeout)
						m_streamingsrc_timeout->stop();
					break;
				}
				default:
					break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free(debug);
			if (err->code != GST_STREAM_ERROR_CODEC_NOT_FOUND)
				eWarning("[eServiceMP3Record] gstBusCall Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName);
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			const GstStructure *msgstruct = gst_message_get_structure(msg);
			if (msgstruct)
			{
				if (gst_is_missing_plugin_message(msg))
				{
					GstCaps *caps = NULL;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL);
					if (caps)
					{
						// FIX: gst_caps_to_string() transfers ownership of the
						// returned string; the previous cast into std::string
						// leaked it (and would crash on a NULL return).
						gchar *caps_str = gst_caps_to_string(caps);
						std::string codec = caps_str ? caps_str : "";
						g_free(caps_str);
						eDebug("[eServiceMP3Record] gstBusCall cannot record because of incompatible codecs %s", codec.c_str());
						gst_caps_unref(caps);
					}
				}
				else
				{
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if (eventname)
					{
						if (!strcmp(eventname, "redirect"))
						{
							// Restart the pipeline pointing at the new URI.
							const char *uri = gst_structure_get_string(msgstruct, "new-location");
							eDebug("[eServiceMP3Record] gstBusCall redirect to %s", uri);
							gst_element_set_state (m_recording_pipeline, GST_STATE_NULL);
							g_object_set(G_OBJECT (m_source), "uri", uri, NULL);
							gst_element_set_state (m_recording_pipeline, GST_STATE_PLAYING);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			if (type == GST_STREAM_STATUS_TYPE_CREATE)
			{
				// Re-resolve the owning element from the message source; a pad
				// source needs an extra ref via gst_pad_get_parent_element.
				if (GST_IS_PAD(source))
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if (GST_IS_ELEMENT(source))
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if (owner)
				{
					GstState state;
					gst_element_get_state(m_recording_pipeline, &state, NULL, 0LL);
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
					if (!strcmp(name, "souphttpsrc") && (state == GST_STATE_READY) && !m_streamingsrc_timeout->isActive())
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("[eServiceMP3Record] gstBusCall setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}
				}
				if (GST_IS_PAD(source))
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free(sourceName);
}
Exemplo n.º 8
0
/*
 * fs_utils_set_bitrate:
 * @element: element exposing a "bitrate" property
 * @bitrate: desired bitrate in bits per second
 *
 * Sets @element's "bitrate" property, converting to kbps for encoders known
 * to take kilobits, and clamping the value to the property's declared range
 * whatever its numeric GType (long, ulong, int or uint).
 */
void
fs_utils_set_bitrate (GstElement *element, glong bitrate)
{
  GParamSpec *spec;
  const char *elements_in_kbps[] = { "lamemp3enc", "lame", "x264enc", "twolame",
    "mpeg2enc", NULL
  };
  int i;
  GstElementFactory *factory;
  const gchar *factory_name = NULL;

  g_return_if_fail (GST_IS_ELEMENT (element));

  spec = g_object_class_find_property (G_OBJECT_GET_CLASS (element), "bitrate");
  g_return_if_fail (spec != NULL);

  factory = gst_element_get_factory (element);
  if (factory)
    factory_name = gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory));

  /* divide by 1000 for elements that are known to use kbs */
  for (i = 0; elements_in_kbps[i]; i++)
    if (factory_name && !strcmp (factory_name, elements_in_kbps[i]))
    {
      bitrate /= 1000;
      break;
    }

  /* FIX: this branch previously compared G_PARAM_SPEC_TYPE (the pspec's own
   * type, G_TYPE_PARAM_LONG) against G_TYPE_LONG and therefore could never
   * match; use the value type like every other branch below. */
  if (G_PARAM_SPEC_VALUE_TYPE (spec) == G_TYPE_LONG)
  {
    g_object_set (element, "bitrate", (glong) CLAMP (bitrate,
            G_PARAM_SPEC_LONG (spec)->minimum,
            G_PARAM_SPEC_LONG (spec)->maximum), NULL);
  }
  else if (G_PARAM_SPEC_VALUE_TYPE (spec) == G_TYPE_ULONG)
  {
    g_object_set (element, "bitrate", (gulong) CLAMP (bitrate,
            G_PARAM_SPEC_ULONG (spec)->minimum,
            G_PARAM_SPEC_ULONG (spec)->maximum), NULL);
  }
  else if (G_PARAM_SPEC_VALUE_TYPE (spec) == G_TYPE_INT)
  {
    gint tmp = MIN (bitrate, G_MAXINT);

    g_object_set (element, "bitrate", (gint)  CLAMP (tmp,
            G_PARAM_SPEC_INT (spec)->minimum,
            G_PARAM_SPEC_INT (spec)->maximum), NULL);
  }
  else if (G_PARAM_SPEC_VALUE_TYPE (spec) == G_TYPE_UINT)
  {
    guint tmp = MIN (bitrate, G_MAXUINT);

    g_object_set (element, "bitrate", (guint) CLAMP (tmp,
            G_PARAM_SPEC_UINT (spec)->minimum,
            G_PARAM_SPEC_UINT (spec)->maximum), NULL);
  }
  else
  {
    g_warning ("bitrate parameter of unknown type");
  }
}
Exemplo n.º 9
0
// Bus message handler for the playbin pipeline. Dispatches on message type:
// EOS, pipeline state transitions, errors/info, tags (incl. embedded cover
// art), async-done (enumerates audio/subtitle streams), element messages
// (missing plugins, video geometry events), buffering and stream status.
// Left byte-identical except for comments: the ordering of the state-machine
// side effects is load-bearing.
void eServiceMP3::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	// NOTE: sourceName is owned by us and freed at the bottom of the switch.
	sourceName = gst_object_get_name(source);
#if 0
	gchar *string;
	if (gst_message_get_structure(msg))
		string = gst_structure_to_string(gst_message_get_structure(msg));
	else
		string = g_strdup(GST_MESSAGE_TYPE_NAME(msg));
	eDebug("eTsRemoteSource::gst_message from %s: %s", sourceName, string);
	g_free(string);
#endif
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			// End of stream -> notify the service layer.
			m_event((iPlayableService*)this, evEOF);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			// Only react to state changes of the top-level playbin.
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);

			if(old_state == new_state)
				break;

			eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));

			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);

			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					// Configure the subtitle sink once it exists, then apply
					// the configured audio delays.
					GstElement *subsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
					if (subsink)
					{
#ifdef GSTREAMER_SUBTITLE_SYNC_MODE_BUG
						/* 
						 * HACK: disable sync mode for now, gstreamer suffers from a bug causing sparse streams to loose sync, after pause/resume / skip
						 * see: https://bugzilla.gnome.org/show_bug.cgi?id=619434
						 * Sideeffect of using sync=false is that we receive subtitle buffers (far) ahead of their
						 * display time.
						 * Not too far ahead for subtitles contained in the media container.
						 * But for external srt files, we could receive all subtitles at once.
						 * And not just once, but after each pause/resume / skip.
						 * So as soon as gstreamer has been fixed to keep sync in sparse streams, sync needs to be re-enabled.
						 */
						g_object_set (G_OBJECT (subsink), "sync", FALSE, NULL);
#endif
#if 0
						/* we should not use ts-offset to sync with the decoder time, we have to do our own decoder timekeeping */
						g_object_set (G_OBJECT (subsink), "ts-offset", -2L * GST_SECOND, NULL);
						/* late buffers probably will not occur very often */
						g_object_set (G_OBJECT (subsink), "max-lateness", 0L, NULL);
						/* avoid prerolling (it might not be a good idea to preroll a sparse stream) */
						g_object_set (G_OBJECT (subsink), "async", TRUE, NULL);
#endif
						eDebug("eServiceMP3::subsink properties set!");
						gst_object_unref(subsink);
					}
					setAC3Delay(ac3_delay);
					setPCMDelay(pcm_delay);
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					// Playback resumed: the streaming-source watchdog is no
					// longer needed.
					if ( m_sourceinfo.is_streaming && m_streamingsrc_timeout )
						m_streamingsrc_timeout->stop();
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error (msg, &err, &debug);
			g_free (debug);
			eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					// evUser+11 = video codec missing, evUser+10 = audio codec
					// missing (distinguished by the failing sink's name).
					if ( g_strrstr(sourceName, "videosink") )
						m_event((iPlayableService*)this, evUser+11);
					else if ( g_strrstr(sourceName, "audiosink") )
						m_event((iPlayableService*)this, evUser+10);
				}
			}
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;

			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					m_event((iPlayableService*)this, evUser+14);
			}
			g_error_free(inf);
			break;
		}
		case GST_MESSAGE_TAG:
		{
			// Merge incoming tags into m_stream_tags and extract embedded
			// cover art to /tmp/.id3coverart.
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);

			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}

			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				// NOTE(review): fd is not checked for failure before write() —
				// confirm whether open() can fail here in practice.
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
				close(fd);
				eDebug("eServiceMP3::/tmp/.id3coverart %d bytes written ", ret);
				m_event((iPlayableService*)this, evUser+13);
			}
			gst_tag_list_free(tags);
			m_event((iPlayableService*)this, evUpdatedInfo);
			break;
		}
		case GST_MESSAGE_ASYNC_DONE:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			// Preroll finished: (re)build the audio and subtitle stream lists
			// from playbin's stream counts, pads and tags.
			GstTagList *tags;
			gint i, active_idx, n_video = 0, n_audio = 0, n_text = 0;

			g_object_get (m_gst_playbin, "n-video", &n_video, NULL);
			g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
			g_object_get (m_gst_playbin, "n-text", &n_text, NULL);

			eDebug("eServiceMP3::async-done - %d video, %d audio, %d subtitle", n_video, n_audio, n_text);

			if ( n_video + n_audio <= 0 )
				stop();

			active_idx = 0;

			m_audioStreams.clear();
			m_subtitleStreams.clear();

			for (i = 0; i < n_audio; i++)
			{
				audioStream audio;
				gchar *g_codec, *g_lang;
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
				GstCaps* caps = gst_pad_get_negotiated_caps(pad);
				if (!caps)
					continue;
				GstStructure* str = gst_caps_get_structure(caps, 0);
				const gchar *g_type = gst_structure_get_name(str);
				eDebug("AUDIO STRUCT=%s", g_type);
				audio.type = gstCheckAudioPad(str);
				// Defaults, overwritten below when tag data is available.
				g_codec = g_strdup(g_type);
				g_lang = g_strdup_printf ("und");
				g_signal_emit_by_name (m_gst_playbin, "get-audio-tags", i, &tags);
				if ( tags && gst_is_tag_list(tags) )
				{
					// NOTE(review): gst_tag_list_get_string replaces the
					// pointers on success, leaking the g_strdup defaults above
					// — pre-existing behavior, left untouched.
					gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &g_codec);
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_free(tags);
				}
				audio.language_code = std::string(g_lang);
				audio.codec = std::string(g_codec);
				eDebug("eServiceMP3::audio stream=%i codec=%s language=%s", i, g_codec, g_lang);
				m_audioStreams.push_back(audio);
				g_free (g_lang);
				g_free (g_codec);
				gst_caps_unref(caps);
			}

			for (i = 0; i < n_text; i++)
			{
				gchar *g_codec = NULL, *g_lang = NULL;
				g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
				subtitleStream subs;

				g_lang = g_strdup_printf ("und");
				if ( tags && gst_is_tag_list(tags) )
				{
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_get_string(tags, GST_TAG_SUBTITLE_CODEC, &g_codec);
					gst_tag_list_free(tags);
				}

				subs.language_code = std::string(g_lang);
				eDebug("eServiceMP3::subtitle stream=%i language=%s codec=%s", i, g_lang, g_codec);

				// Watch the text pad's caps so subtitle type changes are seen.
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-text-pad", i, &pad);
				if ( pad )
					g_signal_connect (G_OBJECT (pad), "notify::caps", G_CALLBACK (gstTextpadHasCAPS), this);
				subs.type = getSubtitleType(pad, g_codec);

				m_subtitleStreams.push_back(subs);
				g_free (g_lang);
			}
			m_event((iPlayableService*)this, evUpdatedInfo);

			if ( m_errorInfo.missing_codec != "" )
			{
				if ( m_errorInfo.missing_codec.find("video/") == 0 || ( m_errorInfo.missing_codec.find("audio/") == 0 && getNumberOfTracks() == 0 ) )
					m_event((iPlayableService*)this, evUser+12);
			}
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			if (const GstStructure *msgstruct = gst_message_get_structure(msg))
			{
				if ( gst_is_missing_plugin_message(msg) )
				{
					// NOTE(review): caps is not NULL-checked before use, and
					// the gst_caps_to_string() result is never g_free'd —
					// pre-existing leak, left untouched in this doc pass.
					GstCaps *caps;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL); 
					std::string codec = (const char*) gst_caps_to_string(caps);
					gchar *description = gst_missing_plugin_message_get_description(msg);
					if ( description )
					{
						eDebug("eServiceMP3::m_errorInfo.missing_codec = %s", codec.c_str());
						m_errorInfo.error_message = "GStreamer plugin " + (std::string)description + " not available!\n";
						m_errorInfo.missing_codec = codec.substr(0,(codec.find_first_of(',')));
						g_free(description);
					}
					gst_caps_unref(caps);
				}
				else
				{
					// Video geometry events posted by the sink: "...Changed"
					// variants additionally notify the service layer.
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if ( eventname )
					{
						if (!strcmp(eventname, "eventSizeChanged") || !strcmp(eventname, "eventSizeAvail"))
						{
							gst_structure_get_int (msgstruct, "aspect_ratio", &m_aspect);
							gst_structure_get_int (msgstruct, "width", &m_width);
							gst_structure_get_int (msgstruct, "height", &m_height);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoSizeChanged);
						}
						else if (!strcmp(eventname, "eventFrameRateChanged") || !strcmp(eventname, "eventFrameRateAvail"))
						{
							gst_structure_get_int (msgstruct, "frame_rate", &m_framerate);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoFramerateChanged);
						}
						else if (!strcmp(eventname, "eventProgressiveChanged") || !strcmp(eventname, "eventProgressiveAvail"))
						{
							gst_structure_get_int (msgstruct, "progressive", &m_progressive);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoProgressiveChanged);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_BUFFERING:
		{
			// Forward buffering percentage and rate statistics.
			GstBufferingMode mode;
			gst_message_parse_buffering(msg, &(m_bufferInfo.bufferPercent));
			gst_message_parse_buffering_stats(msg, &mode, &(m_bufferInfo.avgInRate), &(m_bufferInfo.avgOutRate), &(m_bufferInfo.bufferingLeft));
			m_event((iPlayableService*)this, evBuffering);
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			// When a streaming thread is created for souphttpsrc, install the
			// HTTP timeout on the element and start the watchdog timer.
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			if ( type == GST_STREAM_STATUS_TYPE_CREATE && m_sourceinfo.is_streaming )
			{
				if ( GST_IS_PAD(source) )
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if ( GST_IS_ELEMENT(source) )
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if ( owner )
				{
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
					if (!strcmp(name, "souphttpsrc"))
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("eServiceMP3::GST_STREAM_STATUS_TYPE_CREATE -> setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}

				}
				// gst_pad_get_parent_element returned a new ref; drop it.
				if ( GST_IS_PAD(source) )
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free (sourceName);
}
Exemplo n.º 10
0
/*
 * totem_gst_playbin_get_frame:
 * @play: a playbin element supporting the "convert-frame" action signal
 *
 * Grabs the current video frame as an RGB24 buffer and wraps it in a
 * GdkPixbuf (which takes ownership of the buffer via destroy_pixbuf).
 *
 * Returns: (transfer full) (nullable): the frame as a pixbuf, or NULL on
 *          failure.
 */
GdkPixbuf *
totem_gst_playbin_get_frame (GstElement *play)
{
  GstStructure *s;
  GstBuffer *buf = NULL;
  GdkPixbuf *pixbuf;
  GstCaps *to_caps;
  gint outwidth = 0;
  gint outheight = 0;

  g_return_val_if_fail (play != NULL, NULL);
  g_return_val_if_fail (GST_IS_ELEMENT (play), NULL);

  /* our desired output format (RGB24) */
  to_caps = gst_caps_new_simple ("video/x-raw-rgb",
      "bpp", G_TYPE_INT, 24,
      "depth", G_TYPE_INT, 24,
      /* Note: we don't ask for a specific width/height here, so that
       * videoscale can adjust dimensions from a non-1/1 pixel aspect
       * ratio to a 1/1 pixel-aspect-ratio. We also don't ask for a
       * specific framerate, because the input framerate won't
       * necessarily match the output framerate if there's a deinterlacer
       * in the pipeline. */
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "endianness", G_TYPE_INT, G_BIG_ENDIAN,
      "red_mask", G_TYPE_INT, 0xff0000,
      "green_mask", G_TYPE_INT, 0x00ff00,
      "blue_mask", G_TYPE_INT, 0x0000ff,
      NULL);

  /* get frame */
  g_signal_emit_by_name (play, "convert-frame", to_caps, &buf);
  gst_caps_unref (to_caps);

  if (!buf) {
    GST_DEBUG ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    g_warning ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    return NULL;
  }

  if (!GST_BUFFER_CAPS (buf)) {
    GST_DEBUG ("Could not take screenshot: %s", "no caps on output buffer");
    g_warning ("Could not take screenshot: %s", "no caps on output buffer");
    /* FIX: the converted buffer leaked on this early return */
    gst_buffer_unref (buf);
    return NULL;
  }

  GST_DEBUG ("frame caps: %" GST_PTR_FORMAT, GST_BUFFER_CAPS (buf));

  s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
  gst_structure_get_int (s, "width", &outwidth);
  gst_structure_get_int (s, "height", &outheight);
  if (outwidth <= 0 || outheight <= 0) {
    /* FIX: g_return_val_if_fail here leaked the buffer; release it before
     * bailing out. */
    g_warning ("Could not take screenshot: %s", "invalid frame dimensions");
    gst_buffer_unref (buf);
    return NULL;
  }

  /* create pixbuf from that - use our own destroy function; the pixbuf now
   * owns the buffer reference and destroy_pixbuf releases it. */
  pixbuf = gdk_pixbuf_new_from_data (GST_BUFFER_DATA (buf),
      GDK_COLORSPACE_RGB, FALSE, 8, outwidth, outheight,
      GST_ROUND_UP_4 (outwidth * 3), destroy_pixbuf, buf);

  if (!pixbuf) {
    GST_DEBUG ("Could not take screenshot: %s", "could not create pixbuf");
    g_warning ("Could not take screenshot: %s", "could not create pixbuf");
    gst_buffer_unref (buf);
  }

  return pixbuf;
}
Exemplo n.º 11
0
int main(int argc, char *argv[]) {
    /*
     * Pipeline supervisor for a two-unit Raspberry Pi video installation.
     *
     * Polls stdin (set non-blocking) roughly every 20 ms for an integer
     * pipeline id; when the requested id differs from the active one, the
     * current GStreamer pipeline is torn down and the matching one is
     * launched via gst_parse_launch().  Runs forever; never returns.
     *
     * argv[1] must be "22" or "23" (the last octet of an IP address); it
     * selects which rpicamsrc pipeline (and therefore which udpsink peer)
     * this unit uses.
     */

    //gstreamer launch codes
    char rpicamsrc22[] = "rpicamsrc keyframe-interval=10 preview=0 ! video/x-h264,width=400,height=240,framerate=30/1,profile=baseline ! tee name=t ! queue max-size-time=50000000 leaky=upstream ! rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.1.22 port=9000 t. ! queue max-size-time=50000000 leaky=upstream ! avdec_h264 ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char rpicamsrc23[] = "rpicamsrc keyframe-interval=10 preview=0 ! video/x-h264,width=400,height=240,framerate=30/1,profile=baseline ! tee name=t ! queue max-size-time=50000000 leaky=upstream ! rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.1.23 port=9000 t. ! queue max-size-time=50000000 leaky=upstream ! avdec_h264 ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";

    /* Empty command == "no pipeline": tear down the old one and idle. */
    char blank[] = "";

    char videotestsrc[] = "videotestsrc ! queue ! videoflip method=1 ! eglglessink";
    char videotestsrc_cubed[] = "videotestsrc ! queue ! glupload ! glfiltercube ! gldownload ! eglglessink";

    /* Audio visualizers (ALSA capture -> visual plugin -> screen + JPEG snapshot). */
    char libvisual_jess[] =     "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! libvisual_jess ! video/x-raw,width=240,height=400,framerate=20/1 ! tee name=t ! queue ! videoflip method=1 ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoscale ! video/x-raw,width=240,height=400 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char libvisual_infinite[] = "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! libvisual_infinite ! video/x-raw,width=400,height=240,framerate=20/1 ! tee name=t ! queue ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char libvisual_jakdaw[] =   "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! libvisual_jakdaw ! video/x-raw,width=400,height=240,framerate=20/1 ! tee name=t ! queue ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char libvisual_oinksie[] =  "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! libvisual_oinksie ! video/x-raw,width=400,height=240,framerate=20/1 ! tee name=t ! queue ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char goom[] =               "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! goom ! video/x-raw,width=320,height=240,framerate=20/1 ! tee name=t ! capssetter caps=video/x-raw,height=200 ! queue ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoflip method=3 ! videoscale ! video/x-raw,width=240,height=400 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";
    char goom2k1[] =            "alsasrc device=hw:1 buffer-time=20000 ! queue max-size-time=50000000 leaky=upstream ! goom2k1 ! video/x-raw,width=240,height=400,framerate=20/1 ! tee name=t ! queue ! videoflip method=1 ! eglglessink t. ! queue ! videorate ! video/x-raw,framerate=10/1 ! videoscale ! video/x-raw,width=240,height=400 ! jpegenc ! multifilesink location=/var/www/html/tmp/snapshot.jpg";

    /* Receive side: RTP/H.264 from the peer, optionally through an effect. */
    char normal[] =            "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! queue max-size-time=50000000 leaky=upstream ! eglglessink";
    char glfiltercube[] =      "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! glfiltercube ! gldownload ! eglglessink";
    char gleffects_mirror[] =  "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! queue max-size-time=50000000 leaky=upstream ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! videoflip method=1 ! glupload ! gleffects_mirror ! gldownload ! videoflip method=3 ! eglglessink";
    char gleffects_squeeze[] = "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_squeeze ! gldownload ! eglglessink";
    char gleffects_stretch[] = "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_stretch ! gldownload ! eglglessink";
    char gleffects_tunnel[] =  "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_tunnel ! gldownload ! eglglessink";
    char gleffects_twirl[] =   "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_twirl ! gldownload ! eglglessink";
    char gleffects_bulge[] =   "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_bulge ! gldownload ! eglglessink";
    char gleffects_heat[] =    "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! glupload ! gleffects_heat ! gldownload ! eglglessink";
    char radioactv[] =         "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! videoscale ! video/x-raw,width=320,height=240 ! radioactv !  capssetter caps=video/x-raw,height=200 ! eglglessink";
    char revtv[] =             "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! videoflip method=3 ! revtv ! videoflip method=1 ! eglglessink";
    char agingtv[] =           "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! videoflip method=1 ! agingtv ! videoflip method=3 ! eglglessink";
    char dicetv[] =            "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! dicetv ! eglglessink";
    char warptv[] =            "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! warptv ! eglglessink";
    char shagadelictv[] =      "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! shagadelictv ! eglglessink";
    char vertigotv[] =         "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! vertigotv ! eglglessink";
    char kaleidoscope[] =      "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! kaleidoscope ! eglglessink";
    char marble[] =            "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! marble ! eglglessink";
    char rippletv[] =          "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! rippletv ! eglglessink";
    char edgetv[] =            "udpsrc port=9000 caps=application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264 ! rtph264depay ! avdec_h264 ! videoconvert ! queue max-size-time=50000000 leaky=upstream ! edgetv ! eglglessink";

    /* Local movie playback (hardware H.264 decode + AAC audio out). */
    char movie1[] =  "filesrc location=/home/pi/movies/1.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie2[] =  "filesrc location=/home/pi/movies/2.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie3[] =  "filesrc location=/home/pi/movies/3.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie4[] =  "filesrc location=/home/pi/movies/4.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie5[] =  "filesrc location=/home/pi/movies/5.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie6[] =  "filesrc location=/home/pi/movies/6.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie7[] =  "filesrc location=/home/pi/movies/7.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie8[] =  "filesrc location=/home/pi/movies/8.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie9[] =  "filesrc location=/home/pi/movies/9.mp4  ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie10[] = "filesrc location=/home/pi/movies/10.mp4 ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie11[] = "filesrc location=/home/pi/movies/11.mp4 ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";
    char movie12[] = "filesrc location=/home/pi/movies/12.mp4 ! qtdemux name=dmux ! queue ! h264parse ! omxh264dec ! videoflip method=1 ! eglglessink  dmux. ! queue ! aacparse !  avdec_aac ! audioconvert ! audio/x-raw,channels=2 ! alsasink device=hw:0";

    char * new_cmd;
    char * rpicamsrc;

    int requested_state = 0;
    int active_state = -1; //force change to state 0 on launch

    GstElement *pipeline = NULL;

    int changes = 0;

    if (argc != 2) {
        fprintf(stderr, "Need IP As Argument (22 or 23)\n");
        exit(1);
    }

    /*
     * Build a well-formed fake argv for gst_init().
     * The previous version built an array of three separate one-element
     * arrays cast to char**, so gst_init() indexing (*argv)[1] and
     * (*argv)[2] read past the end of the first array (undefined
     * behavior).  A single contiguous argv array is what gst_init()
     * expects.
     */
    static char arg0_gst[] = "gstvideo";
    static char arg1_gst[] = "--gst-disable-registry-update";
    static char arg2_gst[] = "--gst-debug-level=0";
    char *argv_gst[] = { arg0_gst, arg1_gst, arg2_gst, NULL };
    char **argv_gst_p = argv_gst;
    int argc_gst = 3;
    /* Initialize GStreamer */
    gst_init (&argc_gst, &argv_gst_p);

    //read src and set variables
    int ip = atoi(argv[1]);

    /* NOTE(review): argv[1] appears to be THIS unit's last octet, so unit
     * 23 streams to .22 and vice versa.  The rpicamsrc22/23 variable names
     * refer to the udpsink destination host -- confirm the mapping below
     * is the intended pairing and not an accidental swap. */
    if (ip == 23) rpicamsrc = rpicamsrc22;
    else if (ip ==22) rpicamsrc = rpicamsrc23;
    else {
        fprintf(stderr, "Need IP As Argument (22 or 23)\n");
        exit(1);
    }

    //stats
    uint32_t time_start = 0;
    int missed = 0;       /* loop iterations that overran the 20 ms budget */
    uint32_t time_fps = 0;
    int fps = 0;          /* loop iterations in the current 1 s window */
    uint32_t time_delay = 0; /* total ms spent idle in the current window */

    //non blocking stdin read
    fcntl(STDIN_FILENO, F_SETFL, fcntl(STDIN_FILENO, F_GETFL, 0) | O_NONBLOCK);

    while (1) {

        /* Fixed-rate scheduler: aim for one iteration every 20 ms.
         * predicted_delay is unsigned, so a past deadline shows up as a
         * huge value; the "> 20" test catches that wraparound. */
        time_start += 20;
        uint32_t predicted_delay = time_start - millis(); //calc predicted delay
        if (predicted_delay > 20) predicted_delay = 0; //check for overflow
        if (predicted_delay != 0) {
            delay(predicted_delay);
            time_delay += predicted_delay;
        } else {
            time_start = millis(); //reset timer to now
            printf("GST  Skipping Idle...\n");
            missed++;
        }

        int count = 1;
        char buffer[100];
        //stdin is line buffered so we can cheat a little bit
        while (count > 0) { // dump entire buffer
            count = read(STDIN_FILENO, buffer, sizeof(buffer)-1);
            if (count > 1) { //ignore blank lines
                buffer[count-1] = '\0'; /* strip trailing newline */
                //keep most recent line
                int temp_state = 0;
                int result = sscanf(buffer,"%d", &temp_state);
                if (result != 1) {
                    fprintf(stderr, "GST  Unrecognized input with %d items.\n", result);
                } else {
                    requested_state = temp_state;
                }
            }
        }

        if (active_state != requested_state) {

            // start timer
            struct timeval t1, t2;
            double elapsedTime;
            gettimeofday(&t1, NULL);

            printf("GST  Starting Request: %d\n",requested_state);

            //figure out the correct app
            switch (requested_state) {
            //the basics
            case GST_BLANK:
                new_cmd = blank;
                break;
            case GST_VIDEOTESTSRC:
                new_cmd = videotestsrc;
                break;
            case GST_VIDEOTESTSRC_CUBED:
                new_cmd = videotestsrc_cubed;
                break;
            case GST_RPICAMSRC:
                new_cmd = rpicamsrc;
                break;
            case GST_NORMAL:
                new_cmd = normal;
                break;

            //libvisual 10 - 18
            case GST_LIBVISUAL_JESS:
                new_cmd = libvisual_jess;
                break;	  //good
            case GST_LIBVISUAL_INFINITE:
                new_cmd = libvisual_infinite;
                break;	//good
            case GST_LIBVISUAL_JAKDAW:
                new_cmd = libvisual_jakdaw;
                break;	//good
            case GST_LIBVISUAL_OINKSIE:
                new_cmd = libvisual_oinksie;
                break;	//good
            case GST_GOOM:
                new_cmd = goom;
                break;	//good
            case GST_GOOM2K1:
                new_cmd = goom2k1;
                break;	//good
            //tv effects
            case GST_RADIOACTV:
                new_cmd = radioactv;
                break;//fixed
            case GST_REVTV:
                new_cmd = revtv;
                break;//good
            case GST_AGINGTV:
                new_cmd = agingtv;
                break;//steampunk
            case GST_DICETV:
                new_cmd = dicetv;
                break;//works
            case GST_WARPTV:
                new_cmd = warptv;
                break;//works
            case GST_SHAGADELICTV:
                new_cmd = shagadelictv;
                break;//works
            case GST_VERTIGOTV:
                new_cmd = vertigotv;
                break;//works
            case GST_KALEIDOSCOPE:
                new_cmd = kaleidoscope;
                break;//
            case GST_MARBLE:
                new_cmd = marble;
                break;//
            case GST_RIPPLETV:
                new_cmd = rippletv;
                break;//works
            case GST_EDGETV:
                new_cmd = edgetv;
                break;//works
            //gl effects
            case GST_GLCUBE:
                new_cmd = glfiltercube;
                break;
            case GST_GLMIRROR:
                new_cmd = gleffects_mirror;
                break;
            case GST_GLSQUEEZE:
                new_cmd = gleffects_squeeze;
                break;
            case GST_GLSTRETCH:
                new_cmd = gleffects_stretch;
                break;
            case GST_GLTUNNEL:
                new_cmd = gleffects_tunnel;
                break;	//really good O
            case GST_GLTWIRL:
                new_cmd = gleffects_twirl;
                break; //creepy as f**k
            case GST_GLBULGE:
                new_cmd = gleffects_bulge;
                break;
            case GST_GLHEAT:
                new_cmd = gleffects_heat;
                break;

            case GST_MOVIE1:
                new_cmd = movie1;
                break;
            case GST_MOVIE2:
                new_cmd = movie2;
                break;
            case GST_MOVIE3:
                new_cmd = movie3;
                break;
            case GST_MOVIE4:
                new_cmd = movie4;
                break;
            case GST_MOVIE5:
                new_cmd = movie5;
                break;
            case GST_MOVIE6:
                new_cmd = movie6;
                break;
            case GST_MOVIE7:
                new_cmd = movie7;
                break;
            case GST_MOVIE8:
                new_cmd = movie8;
                break;
            case GST_MOVIE9:
                new_cmd = movie9;
                break;
            case GST_MOVIE10:
                new_cmd = movie10;
                break;
            case GST_MOVIE11:
                new_cmd = movie11;
                break;
            case GST_MOVIE12:
                new_cmd = movie12;
                break;

            default:
                //skip bad requests by claiming we already did it!
                requested_state = active_state;
            }

            if (active_state != requested_state) {

                //kill old pipeline
                if (GST_IS_ELEMENT(pipeline)) {
                    gst_element_set_state (pipeline, GST_STATE_NULL);
                    gst_object_unref (pipeline);
                    /* BUGFIX: clear the dangling pointer.  Without this,
                     * selecting GST_BLANK left `pipeline` pointing at freed
                     * memory, and the GST_IS_ELEMENT() checks below (and on
                     * the next state change) dereferenced it. */
                    pipeline = NULL;
                }

                //make new pipeline
                if (new_cmd != blank) {
                    /* NOTE(review): parse errors are ignored (NULL GError);
                     * gst_parse_launch may still return a partial pipeline. */
                    pipeline = gst_parse_launch (new_cmd, NULL);
                }

                //new start pipeline
                if (GST_IS_ELEMENT(pipeline)) {
                    gst_element_set_state (pipeline, GST_STATE_PLAYING);
                }

                //mark request as completed
                active_state = requested_state;

                // stop timer
                gettimeofday(&t2, NULL);

                // compute and print the elapsed time in millisec
                elapsedTime = (t2.tv_sec - t1.tv_sec) * 1000.0;      // sec to ms
                elapsedTime += (t2.tv_usec - t1.tv_usec) / 1000.0;   // us to ms
                printf("GST  Lag to enter mode %d: %f ms\n",requested_state,elapsedTime);
                changes++;
            }
        }

        //fps calculations ran every second
        fps++;
        if (time_fps < millis()) {
            /* time_delay is in ms over a ~1000 ms window, so /10 gives %. */
            printf("GST  FPS:%d  mis:%d idle:%d%% changes:%d\n",fps,missed,time_delay/10,changes);
            fps = 0;
            time_delay = 0;
            time_fps += 1000;
            if (time_fps < millis()) {
                time_fps = millis()+1000;
            }
        }
    }

}
void
_bp_video_pipeline_setup (BansheePlayer *player, GstBus *bus)
{
    /* Selects and installs a video sink on the player's playbin.
     * Preference order: (1) a sink supplied by the application via
     * video_pipeline_setup_cb, (2) gconfvideosink, (3) autovideosink,
     * (4) a fakesink fallback.  Also records which display-context type
     * was chosen and, on X11/Win32 builds, wires up the bus handlers
     * used for video-window embedding. */
    GstElement *videosink;
    
    g_return_if_fail (IS_BANSHEE_PLAYER (player));
    
    /* Give the embedding application first chance to provide its own sink. */
    if (player->video_pipeline_setup_cb != NULL) {
        videosink = player->video_pipeline_setup_cb (player, bus);
        if (videosink != NULL && GST_IS_ELEMENT (videosink)) {
            g_object_set (G_OBJECT (player->playbin), "video-sink", videosink, NULL);
            player->video_display_context_type = BP_VIDEO_DISPLAY_CONTEXT_CUSTOM;
            return;
        }
    }
    
    #if defined(GDK_WINDOWING_X11) || defined(GDK_WINDOWING_WIN32)

    player->video_display_context_type = BP_VIDEO_DISPLAY_CONTEXT_GDK_WINDOW;
    
    /* Fall back through progressively more generic sinks. */
    videosink = gst_element_factory_make ("gconfvideosink", "videosink");
    if (videosink == NULL) {
        videosink = gst_element_factory_make ("autovideosink", "videosink");
        if (videosink == NULL) {
            /* No usable video sink: mark video as unsupported and discard
             * frames with a fakesink (sync=TRUE so timing still advances). */
            player->video_display_context_type = BP_VIDEO_DISPLAY_CONTEXT_UNSUPPORTED;
            videosink = gst_element_factory_make ("fakesink", "videosink");
            if (videosink != NULL) {
                g_object_set (G_OBJECT (videosink), "sync", TRUE, NULL);
            }
        }
    }
    
    g_object_set (G_OBJECT (player->playbin), "video-sink", videosink, NULL);
    
    /* Element sync-messages must be handled on the streaming thread for
     * window-handle negotiation; hook both the sync handler and signal. */
    gst_bus_set_sync_handler (bus, gst_bus_sync_signal_handler, player);
    g_signal_connect (bus, "sync-message::element", G_CALLBACK (bp_video_bus_element_sync_message), player);
        
    /* autovideosink and gconfvideosink are bins: watch for the real sink
     * element being added inside them. */
    if (GST_IS_BIN (videosink)) {
        g_signal_connect (videosink, "element-added", G_CALLBACK (bp_video_sink_element_added), player);
    }
    
    #else
    
    /* No supported windowing system: video display is unavailable. */
    player->video_display_context_type = BP_VIDEO_DISPLAY_CONTEXT_UNSUPPORTED;

    #ifndef WIN32

    /* Still install a synced fakesink so pipelines with video play audio
     * at the correct rate. */
    videosink = gst_element_factory_make ("fakesink", "videosink");
    if (videosink != NULL) {
        g_object_set (G_OBJECT (videosink), "sync", TRUE, NULL);
    }
    
    g_object_set (G_OBJECT (player->playbin), "video-sink", videosink, NULL);

    #endif
    
    #endif

    /* Let the application prepare its window now that a sink is in place. */
    if (player->video_prepare_window_cb != NULL) {
        player->video_prepare_window_cb (player);
    }
}
/* Pad-block probe that splices the ReplayGain "rgvolume" element into, or
 * out of, the audio path while the pad is blocked, so relinking happens
 * with no data flowing.  Always returns GST_PAD_PROBE_REMOVE so the probe
 * (and the block) is dropped once the pipeline matches the desired state.
 * srcPad/info are unused; user_data is the BansheePlayer. */
static GstPadProbeReturn
pad_block_cb (GstPad *srcPad, GstPadProbeInfo *info, gpointer user_data)
{
    BansheePlayer* player;

    player = (BansheePlayer*) user_data;
    g_return_val_if_fail (IS_BANSHEE_PLAYER (player), GST_PAD_PROBE_OK);

    // The pad_block_cb can get triggered multiple times, on different threads.
    // Lock around the link/unlink code, so we don't end up going through here
    // with inconsistent state.
    g_mutex_lock (player->replaygain_mutex);

    if ((player->replaygain_enabled && player->rgvolume_in_pipeline) ||
        (!player->replaygain_enabled && !player->rgvolume_in_pipeline)) {
        // The pipeline is already in the correct state.  Unblock the pad, and return.
        player->rg_pad_block_id = 0;
        g_mutex_unlock (player->replaygain_mutex);
        return GST_PAD_PROBE_REMOVE;
    }

    /* Break the existing links around (or across) rgvolume before
     * rearranging the elements. */
    if (player->rgvolume_in_pipeline) {
        gst_element_unlink (player->before_rgvolume, player->rgvolume);
        gst_element_unlink (player->rgvolume, player->after_rgvolume);
    } else {
        gst_element_unlink (player->before_rgvolume, player->after_rgvolume);
    }

    if (player->replaygain_enabled) {
        /* Enabling: create a fresh rgvolume; if creation fails, fall back
         * to plain (non-ReplayGain) linking below. */
        player->rgvolume = _bp_rgvolume_new (player);
        if (!GST_IS_ELEMENT (player->rgvolume)) {
            player->replaygain_enabled = FALSE;
        }
    } else {
        /* Disabling: shut the element down and drop it from the bin
         * (gst_bin_remove releases the bin's reference). */
        gst_element_set_state (player->rgvolume, GST_STATE_NULL);
        gst_bin_remove (GST_BIN (player->audiobin), player->rgvolume);
    }

    if (player->replaygain_enabled && GST_IS_ELEMENT (player->rgvolume)) {
        g_signal_connect (player->rgvolume, "notify::target-gain", G_CALLBACK (on_target_gain_changed), player);
        gst_bin_add (GST_BIN (player->audiobin), player->rgvolume);
        gst_element_sync_state_with_parent (player->rgvolume);

        // link in rgvolume and connect to the real audio sink
        gst_element_link (player->before_rgvolume, player->rgvolume);
        gst_element_link (player->rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = TRUE;
    } else {
        // link the queue with the real audio sink
        gst_element_link (player->before_rgvolume, player->after_rgvolume);
        player->rgvolume_in_pipeline = FALSE;
    }

    // Our state is now consistent
    player->rg_pad_block_id = 0;
    g_mutex_unlock (player->replaygain_mutex);

    _bp_rgvolume_print_volume (player);

    return GST_PAD_PROBE_REMOVE;
}
Exemplo n.º 14
0
/**
 * xplayer_gst_playbin_get_frame:
 * @play: a playbin element supporting the "convert-sample" action signal
 *
 * Grabs the current video frame from @play as an RGB24 GdkPixbuf, applying
 * any image-orientation tag found in the stream (cached on @play via
 * object data after the first lookup).
 *
 * Returns: a new #GdkPixbuf (caller owns a reference), or %NULL on failure.
 * On success, ownership of the converted #GstSample passes to the pixbuf's
 * destroy notify (destroy_pixbuf).
 */
GdkPixbuf *
xplayer_gst_playbin_get_frame (GstElement *play)
{
  GstStructure *s;
  GstSample *sample = NULL;
  GdkPixbuf *pixbuf = NULL;
  GstCaps *to_caps, *sample_caps;
  gint outwidth = 0;
  gint outheight = 0;
  GstMemory *memory;
  GstMapInfo info;
  GdkPixbufRotation rotation = GDK_PIXBUF_ROTATE_NONE;

  g_return_val_if_fail (play != NULL, NULL);
  g_return_val_if_fail (GST_IS_ELEMENT (play), NULL);

  /* our desired output format (RGB24) */
  to_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "RGB",
      /* Note: we don't ask for a specific width/height here, so that
       * videoscale can adjust dimensions from a non-1/1 pixel aspect
       * ratio to a 1/1 pixel-aspect-ratio. We also don't ask for a
       * specific framerate, because the input framerate won't
       * necessarily match the output framerate if there's a deinterlacer
       * in the pipeline. */
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      NULL);

  /* get frame */
  g_signal_emit_by_name (play, "convert-sample", to_caps, &sample);
  gst_caps_unref (to_caps);

  if (!sample) {
    GST_DEBUG ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    g_warning ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    return NULL;
  }

  sample_caps = gst_sample_get_caps (sample);
  if (!sample_caps) {
    GST_DEBUG ("Could not take screenshot: %s", "no caps on output buffer");
    g_warning ("Could not take screenshot: %s", "no caps on output buffer");
    /* BUGFIX: the sample was leaked on this path. */
    gst_sample_unref (sample);
    return NULL;
  }

  GST_DEBUG ("frame caps: %" GST_PTR_FORMAT, sample_caps);

  s = gst_caps_get_structure (sample_caps, 0);
  gst_structure_get_int (s, "width", &outwidth);
  gst_structure_get_int (s, "height", &outheight);
  if (outwidth <= 0 || outheight <= 0)
    goto done;

  memory = gst_buffer_get_memory (gst_sample_get_buffer (sample), 0);
  if (!gst_memory_map (memory, &info, GST_MAP_READ)) {
    /* Mapping failed: fall through to the common failure path below. */
    gst_memory_unref (memory);
    goto done;
  }

  /* create pixbuf from that - use our own destroy function; the pixbuf
   * borrows the sample's pixel data, so the sample is kept alive until
   * destroy_pixbuf runs. */
  pixbuf = gdk_pixbuf_new_from_data (info.data,
      GDK_COLORSPACE_RGB, FALSE, 8, outwidth, outheight,
      GST_ROUND_UP_4 (outwidth * 3), destroy_pixbuf, sample);

  gst_memory_unmap (memory, &info);
  /* BUGFIX: gst_buffer_get_memory() returns a reference; it was leaked. */
  gst_memory_unref (memory);

done:
  if (!pixbuf) {
    GST_DEBUG ("Could not take screenshot: %s", "could not create pixbuf");
    g_warning ("Could not take screenshot: %s", "could not create pixbuf");
    gst_sample_unref (sample);
    /* BUGFIX: previously execution continued into the rotation code below
     * with pixbuf == NULL, passing NULL to gdk_pixbuf_rotate_simple(). */
    return NULL;
  }

  /* Did we check whether we need to rotate the video? */
  if (g_object_get_data (G_OBJECT (play), "orientation-checked") == NULL) {
    GstTagList *tags = NULL;

    g_signal_emit_by_name (G_OBJECT (play), "get-video-tags", 0, &tags);
    if (tags) {
      char *orientation_str;
      gboolean ret;

      ret = gst_tag_list_get_string_index (tags, GST_TAG_IMAGE_ORIENTATION, 0, &orientation_str);
      if (!ret || !orientation_str)
        rotation = GDK_PIXBUF_ROTATE_NONE;
      else if (g_str_equal (orientation_str, "rotate-90"))
        rotation = GDK_PIXBUF_ROTATE_CLOCKWISE;
      else if (g_str_equal (orientation_str, "rotate-180"))
        rotation = GDK_PIXBUF_ROTATE_UPSIDEDOWN;
      else if (g_str_equal (orientation_str, "rotate-270"))
        rotation = GDK_PIXBUF_ROTATE_COUNTERCLOCKWISE;

      gst_tag_list_unref (tags);
    }

    /* Cache the answer on the element so tags are only queried once. */
    g_object_set_data (G_OBJECT (play), "orientation-checked", GINT_TO_POINTER(1));
    g_object_set_data (G_OBJECT (play), "orientation", GINT_TO_POINTER(rotation));
  }

  rotation = GPOINTER_TO_INT (g_object_get_data (G_OBJECT (play), "orientation"));
  if (rotation != GDK_PIXBUF_ROTATE_NONE) {
    GdkPixbuf *rotated;

    rotated = gdk_pixbuf_rotate_simple (pixbuf, rotation);
    if (rotated) {
      g_object_unref (pixbuf);
      pixbuf = rotated;
    }
  }

  return pixbuf;
}
Exemplo n.º 15
0
/* Dispatches an incoming D-Bus message addressed to this player.
 * The message carries an INT32 command (a HEMP_STATE_* value) and a string
 * array of URIs to push onto the head of the hotlist.  If any URIs were
 * added, "play" is treated as "skip to the new head". */
void
handle_dbus_message(DBusMessage *msg)
{
    int state;
    char **new_uris = NULL;
    int uri_count = 0;
    GstState pipeline_state;
    int hemp_state;
    int i;
    int new_head = 0; /* is there a new hotlist head?
                         If so, play means skip */
    const char *destination;

    /* BUGFIX: dbus_message_get_destination() may return NULL (e.g. for
     * broadcast signals); strncmp on NULL is undefined behavior. */
    destination = dbus_message_get_destination(msg);
    if (destination == NULL)
        return;

    /* Prefix match: only handle messages aimed at our destination. */
    if (!strncmp (HEMP_DBUS_DESTINATION, destination,
                  strlen(HEMP_DBUS_DESTINATION))) {
        /* BUGFIX: the return value was ignored, so a malformed message
         * left state/new_uris/uri_count uninitialized (UB when read). */
        if (!dbus_message_get_args (msg, NULL,
                                    DBUS_TYPE_INT32, &state,
                                    DBUS_TYPE_ARRAY, DBUS_TYPE_STRING,
                                    &new_uris, &uri_count,
                                    DBUS_TYPE_INVALID))
            return;

        for (i = 0; i<uri_count; i++) {
            g_queue_push_head(hotlist, 
                              new_playlist_entry(new_uris[i]));
            new_head = 1; 
        }
        /* NOTE(review): new_uris is allocated by dbus_message_get_args and
         * should be released with dbus_free_string_array() once the entries
         * no longer reference it -- confirm new_playlist_entry() copies the
         * string before adding the free here. */
        if (uri_count) write_playlist();

        if (!GST_IS_ELEMENT(pipeline)) init_pipeline();
        /* Blocking query of the pipeline's current state. */
        gst_element_get_state(GST_ELEMENT (pipeline),
                              &pipeline_state,
                              NULL,
                              GST_CLOCK_TIME_NONE);
        if (pipeline_state == GST_STATE_PAUSED) {
            hemp_state = HEMP_STATE_PAUSED;
        } else {
            hemp_state = HEMP_STATE_PLAYING;
        }

        switch (state) {
            case HEMP_STATE_TOGGLE:
                if (pipeline_state == GST_STATE_PLAYING) {
                    gst_element_set_state (GST_ELEMENT (pipeline),
                                           GST_STATE_PAUSED);
                } else {
                    gst_element_set_state (GST_ELEMENT (pipeline),
                                           GST_STATE_PLAYING);
                }
                break;
            case HEMP_STATE_SKIP:
                next(hemp_state, 1);
                break;
            case HEMP_STATE_PREVIOUS:
                previous(hemp_state);
                break;
            case HEMP_STATE_PLAYING:
                if (new_head) {
                    /* New URIs arrived: jump to the new head of the list. */
                    next(HEMP_STATE_PLAYING, 0);
                } else {
                    gst_element_set_state (GST_ELEMENT (pipeline),
                                           GST_STATE_PLAYING);
                }
                break;
            case HEMP_STATE_PAUSED:
                gst_element_set_state (GST_ELEMENT (pipeline),
                                       GST_STATE_PAUSED);
                break;
            case HEMP_STATE_STOP:
                drop_pipeline(0);
                break;
            case HEMP_STATE_PING:
                printf("ping!\n");
                break;
            default:
                printf("Unknown state %d\n", state);
       }
    }
}
/* Set up an element monitor: verify the target really is a GstElement,
 * refuse double-monitoring, inspect the element, hook "pad-added" so future
 * pads get wrapped, and wrap every pad the element already exposes.
 *
 * monitor: the GstValidateMonitor whose target should be a GstElement.
 * Returns: TRUE on success, FALSE if the target is not an element or is
 * already monitored.  The target reference obtained from
 * gst_validate_monitor_get_target() is released on every path. */
static gboolean
gst_validate_element_monitor_do_setup (GstValidateMonitor * monitor)
{
  GstIterator *iterator;
  gboolean done;
  GstPad *pad;
  GstValidateElementMonitor *elem_monitor;
  GstElement *element;
  GstObject *target = gst_validate_monitor_get_target (monitor);

  /* This monitor flavour only applies to elements. */
  if (!GST_IS_ELEMENT (target)) {
    gst_object_unref (target);
    GST_WARNING_OBJECT (monitor, "Trying to create element monitor with other "
        "type of object");
    return FALSE;
  }

  elem_monitor = GST_VALIDATE_ELEMENT_MONITOR_CAST (monitor);

  GST_DEBUG_OBJECT (monitor, "Setting up monitor for element %" GST_PTR_FORMAT,
      target);
  element = GST_ELEMENT_CAST (target);

  /* Refuse to attach twice to the same element.  (Message fixed: the
   * guarded object here is the element, not a pad.) */
  if (g_object_get_data ((GObject *) element, "validate-monitor")) {
    GST_DEBUG_OBJECT (elem_monitor,
        "Element already has a validate-monitor associated");
    gst_object_unref (target);
    return FALSE;
  }

  gst_validate_element_monitor_inspect (elem_monitor);

  /* Wrap pads created after this point as they appear. */
  elem_monitor->pad_added_id = g_signal_connect (element, "pad-added",
      G_CALLBACK (_validate_element_pad_added), monitor);

  /* Wrap the pads that already exist.  g_value_reset() drops the object
   * reference the GValue holds after each iteration. */
  iterator = gst_element_iterate_pads (element);
  done = FALSE;
  while (!done) {
    GValue value = { 0, };

    switch (gst_iterator_next (iterator, &value)) {
      case GST_ITERATOR_OK:
        pad = g_value_get_object (&value);
        gst_validate_element_monitor_wrap_pad (elem_monitor, pad);
        g_value_reset (&value);
        break;
      case GST_ITERATOR_RESYNC:
        /* TODO how to handle this? */
        gst_iterator_resync (iterator);
        break;
      case GST_ITERATOR_ERROR:
        done = TRUE;
        break;
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
    }
  }
  gst_iterator_free (iterator);
  gst_object_unref (target);

  set_config_properties (monitor, element);

  return TRUE;
}
Exemplo n.º 17
0
/* Memory-usage stress test for element/pad lifecycle.
 *
 * Repeatedly creates, links, unrefs, and strips pads from fakesink/fakesrc
 * elements, sampling process memory via vmsize() (project helper) after each
 * phase to spot leaks.  argv[1] optionally overrides the iteration count
 * (defaults to the project constant ITERS).
 *
 * NOTE(review): this uses the GStreamer 0.10 API (gst_element_get_pad,
 * GST_OBJECT_IS_FLOATING); pad references returned by gst_element_get_pad
 * are deliberately not unreffed here — the final "leaked:" line is the
 * measurement.  Fixed: removed six stray empty statements (";;"). */
int
main (int argc, gchar * argv[])
{
  GstElement *element;
  GstElement *element2;
  GstPad *pad;
  long usage1;
  gint i, iters;

  gst_init (&argc, &argv);

  /* Optional iteration count from the command line. */
  if (argc == 2)
    iters = atoi (argv[1]);
  else
    iters = ITERS;

  g_print ("starting element with pad test with %d iterations\n", iters);
  usage1 = vmsize ();

  /* Sanity pass: a fresh element is floating, its pad is not, and the pad
   * reports the element as parent. */
  element = gst_element_factory_make ("fakesink", NULL);
  g_assert (GST_IS_ELEMENT (element));
  pad = gst_element_get_pad (element, "sink");
  g_assert (GST_IS_PAD (pad));
  g_assert (GST_OBJECT_IS_FLOATING (element));
  g_assert (!GST_OBJECT_IS_FLOATING (pad));
  g_assert (gst_pad_get_parent (pad) == element);
  gst_object_unref (element);
  g_print ("create/addpad/unref 1 new element: %ld\n", vmsize () - usage1);

  /* Phase 1: bare create/unref churn. */
  for (i = 0; i < iters; i++) {
    element = gst_element_factory_make ("fakesink", NULL);
    g_assert (GST_IS_ELEMENT (element));
    gst_object_unref (element);
  }
  g_print ("create/unref %d elements: %ld\n", iters, vmsize () - usage1);

  /* Phase 2: link src->sink pairs, then verify unreffing the sink also
   * unlinks its pad. */
  for (i = 0; i < iters / 2; i++) {
    element = gst_element_factory_make ("fakesink", NULL);
    g_assert (GST_IS_ELEMENT (element));
    element2 = gst_element_factory_make ("fakesrc", NULL);
    g_assert (GST_IS_ELEMENT (element2));
    gst_element_link_pads (element2, "src", element, "sink");
    g_assert (GST_PAD_IS_LINKED (gst_element_get_pad (element2, "src")));
    g_assert (GST_PAD_IS_LINKED (gst_element_get_pad (element, "sink")));
    gst_object_unref (element);
    g_assert (!GST_PAD_IS_LINKED (gst_element_get_pad (element2, "src")));
    gst_object_unref (element2);
  }
  g_print ("create/link/unref %d element duos: %ld\n", iters / 2,
      vmsize () - usage1);

  /* Phase 3: removing a pad makes it unreachable by name. */
  element = gst_element_factory_make ("fakesink", NULL);
  g_assert (GST_IS_ELEMENT (element));
  pad = gst_element_get_pad (element, "sink");
  g_assert (GST_IS_PAD (pad));
  gst_element_remove_pad (element, pad);
  g_assert (gst_element_get_pad (element, "sink") == NULL);
  gst_object_unref (element);

  g_print ("pad removal on one element: %ld\n", vmsize () - usage1);

  for (i = 0; i < iters / 2; i++) {
    element = gst_element_factory_make ("fakesink", NULL);
    g_assert (GST_IS_ELEMENT (element));
    pad = gst_element_get_pad (element, "sink");
    g_assert (GST_IS_PAD (pad));
    gst_element_remove_pad (element, pad);
    g_assert (gst_element_get_pad (element, "sink") == NULL);
    gst_object_unref (element);
  }
  g_print ("pad removal loop on %d elements: %ld\n", iters / 2,
      vmsize () - usage1);

  /* Phase 4: a pad kept alive past removal has no parent and can be
   * unreffed independently of its former element. */
  for (i = 0; i < iters / 2; i++) {
    element = gst_element_factory_make ("fakesink", NULL);
    g_assert (GST_IS_ELEMENT (element));
    pad = gst_element_get_pad (element, "sink");
    g_assert (GST_IS_PAD (pad));
    gst_object_ref (pad);
    gst_element_remove_pad (element, pad);
    g_assert (gst_pad_get_parent (pad) == NULL);
    gst_object_unref (pad);
    gst_object_unref (element);
  }
  g_print ("pad ref/removal/test loop on %d elements: %ld\n", iters / 2,
      vmsize () - usage1);

  /* Phase 5: unreffing an element disposes its pads too. */
  element = gst_element_factory_make ("fakesink", NULL);
  g_assert (GST_IS_ELEMENT (element));
  pad = gst_element_get_pad (element, "sink");
  g_assert (GST_IS_PAD (pad));
  gst_object_unref (element);

  g_print ("pad unref on one element: %ld\n", vmsize () - usage1);

  for (i = 0; i < iters / 2; i++) {
    element = gst_element_factory_make ("fakesink", NULL);
    g_assert (GST_IS_ELEMENT (element));
    pad = gst_element_get_pad (element, "sink");
    g_assert (GST_IS_PAD (pad));
    gst_object_unref (element);
  }
  g_print ("pad unref loop on %d elements: %ld\n", iters / 2,
      vmsize () - usage1);

  /* Net growth since the first sample — ideally zero. */
  g_print ("leaked: %ld\n", vmsize () - usage1);

  return 0;
}
Exemplo n.º 18
0
/* Create a special RTP source (e.g. a DTMF source — presumably, given the
 * "rtpdtmfsrc" error text below; confirm against the klass implementations),
 * build its source element via the class's build() vfunc, add it to the
 * outer bin, and link it to a request pad on the RTP muxer.
 *
 * klass: the concrete FsRtpSpecialSource subclass; must provide build().
 * negotiated_codec_associations: passed (dereferenced) to build(); guarded
 *   by `mutex` during the call — presumably the mutex protects this list,
 *   TODO confirm against callers.
 * selected_codec: codec forwarded to build().
 * bin: outer GstBin that will own the built src element.
 * rtpmuxer: muxer whose request pad the src is linked to.
 *
 * Returns: a new FsRtpSpecialSource (caller owns the ref), or NULL on any
 * failure.  On failure after the element was added to the bin, the element
 * is set to NULL state and removed before the source object is unreffed. */
static FsRtpSpecialSource *
fs_rtp_special_source_new (FsRtpSpecialSourceClass *klass,
    GList **negotiated_codec_associations,
    GMutex *mutex,
    FsCodec *selected_codec,
    GstElement *bin,
    GstElement *rtpmuxer)
{
  FsRtpSpecialSource *source = NULL;
  GstPad *pad = NULL;

  g_return_val_if_fail (klass, NULL);
  g_return_val_if_fail (klass->build, NULL);
  g_return_val_if_fail (GST_IS_BIN (bin), NULL);
  g_return_val_if_fail (GST_IS_ELEMENT (rtpmuxer), NULL);

  source = g_object_new (G_OBJECT_CLASS_TYPE (klass),
      NULL);
  g_return_val_if_fail (source, NULL);

  /* Hold the lock only around the build() call that reads the negotiated
   * codec association list. */
  g_mutex_lock (mutex);

  source->priv->rtpmuxer = gst_object_ref (rtpmuxer);
  source->priv->outer_bin = gst_object_ref (bin);
  source->priv->src = klass->build (source, *negotiated_codec_associations,
      selected_codec);

  g_mutex_unlock (mutex);

  if (!source->priv->src)
    goto error;

  /* gst_bin_add takes ownership on success; on failure we still hold the
   * (floating) ref and drop it here. */
  if (!gst_bin_add (GST_BIN (source->priv->outer_bin), source->priv->src))
  {
    GST_ERROR ("Could not add bin to outer bin");
    gst_object_unref (source->priv->src);
    source->priv->src = NULL;
    goto error;
  }

  /* Prefer a priority sink pad on the muxer, fall back to a plain one. */
  source->priv->muxer_request_pad = gst_element_get_request_pad (rtpmuxer,
      "priority_sink_%d");
  if (!source->priv->muxer_request_pad)
    source->priv->muxer_request_pad = gst_element_get_request_pad (rtpmuxer,
        "sink_%d");

  if (!source->priv->muxer_request_pad)
  {
    GST_ERROR ("Could not get request pad from muxer");
    goto error_added;
  }

  pad = gst_element_get_static_pad (source->priv->src, "src");

  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, source->priv->muxer_request_pad)))
  {
    GST_ERROR ("Could not link rtpdtmfsrc src to muxer sink");
    gst_object_unref (pad);
    goto error_added;
  }
  gst_object_unref (pad);

  if (!gst_element_sync_state_with_parent (source->priv->src))
  {
    GST_ERROR ("Could not sync capsfilter state with its parent");
    goto error_added;
  }

  return source;

  /* Failure after the src element entered the bin: shut it down and let
   * gst_bin_remove drop the bin's reference.
   * NOTE(review): the muxer request pad is not released here — presumably
   * the source's dispose handles it via priv->muxer_request_pad; confirm. */
 error_added:
  gst_element_set_state (source->priv->src, GST_STATE_NULL);
  gst_bin_remove (GST_BIN (source->priv->outer_bin), source->priv->src);
  source->priv->src = NULL;

 error:
  g_object_unref (source);

  return NULL;
}