Example 1
bool GstShow::finish_pipeline()
{

    GstStateChangeReturn stateret;

    for (int i = 0; i<2; i++)
    {
        if(!gst_element_link_many(queue[i], scale[i], scalefilter[i], mixer, NULL)) {
                L_(lerror) << ("GStreamer: cannot link elements in finish\n");
                return false;
        }
    }
    
    if(!gst_element_link(mixer, videosink)) {
            L_(lerror) << ("GStreamer: cannot link mixer to sink\n");
            return false;
    }

    // prepare the pipeline
    
    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        L_(lerror) << ("GStreamer: cannot put pipeline to play\n");
        return false;
    }

    handleMessage(pipeline);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");

    return true;    
}
Example 2
void owr_dump_dot_file(const gchar *base_filename)
{
    g_return_if_fail(owr_pipeline);
    g_return_if_fail(base_filename);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(owr_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, base_filename);
}
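Note: GST_DEBUG_BIN_TO_DOT_FILE only writes a file when the GST_DEBUG_DUMP_DOT_DIR environment variable points to a writable directory; otherwise it is a no-op. Below is a minimal, self-contained sketch of the typical workflow; the pipeline description and the "demo" base name are arbitrary placeholders, not taken from the examples in this listing.

#include <gst/gst.h>

/* Minimal sketch, assuming GST_DEBUG_DUMP_DOT_DIR is exported before running,
 * e.g.: GST_DEBUG_DUMP_DOT_DIR=/tmp ./dot-demo */
int main (int argc, char *argv[])
{
  GstElement *pipeline;

  gst_init (&argc, &argv);

  /* Arbitrary demo pipeline. */
  pipeline = gst_parse_launch ("videotestsrc ! fakesink", NULL);
  if (!pipeline)
    return 1;

  /* Pads are negotiated during the state change, so the graph is most
   * informative once the pipeline has reached PAUSED or PLAYING. */
  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* Writes $GST_DEBUG_DUMP_DOT_DIR/demo.dot; render it with e.g.
   * dot -Tpng demo.dot -o demo.png */
  GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "demo");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}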
Example 3
static void
gve_error_msg (GstVideoEditor * gve, GstMessage * msg)
{
  GError *err = NULL;
  gchar *dbg = NULL;

  gst_message_parse_error (msg, &err, &dbg);
  GST_ERROR ("message = %s", GST_STR_NULL (err->message));
  GST_ERROR ("domain  = %d (%s)", err->domain,
      GST_STR_NULL (g_quark_to_string (err->domain)));
  GST_ERROR ("code    = %d", err->code);
  GST_ERROR ("debug   = %s", GST_STR_NULL (dbg));
  GST_ERROR ("source  = %" GST_PTR_FORMAT, msg->src);

  g_message ("Error: %s\n%s\n", GST_STR_NULL (err->message),
      GST_STR_NULL (dbg));
  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(gve->priv->main_pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "longomatch-editor-error");
  g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, err->message);
  g_error_free (err);
  g_free (dbg);
}
Example 4
void
totem_gst_message_print (GstMessage *msg,
			 GstElement *play,
			 const char *filename)
{
  GError *err = NULL;
  char *dbg = NULL;

  g_return_if_fail (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR);

  if (play != NULL) {
    g_return_if_fail (filename != NULL);

    GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN_CAST (play),
			       GST_DEBUG_GRAPH_SHOW_ALL ^ GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS,
			       filename);
  }

  gst_message_parse_error (msg, &err, &dbg);
  if (err) {
    char *uri;

    g_object_get (play, "uri", &uri, NULL);
    GST_ERROR ("message = %s", GST_STR_NULL (err->message));
    GST_ERROR ("domain  = %d (%s)", err->domain,
        GST_STR_NULL (g_quark_to_string (err->domain)));
    GST_ERROR ("code    = %d", err->code);
    GST_ERROR ("debug   = %s", GST_STR_NULL (dbg));
    GST_ERROR ("source  = %" GST_PTR_FORMAT, msg->src);
    GST_ERROR ("uri     = %s", GST_STR_NULL (uri));
    g_free (uri);

    g_message ("Error: %s\n%s\n", GST_STR_NULL (err->message),
        GST_STR_NULL (dbg));

    g_error_free (err);
  }
  g_free (dbg);
}
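Example 4 above passes GST_DEBUG_GRAPH_SHOW_ALL ^ GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS, i.e. every detail except non-default property values. The details argument is a GstDebugGraphDetails bit mask, so individual flags can also be OR-ed together. A short sketch, assuming a variable named pipeline already holds a valid element:

/* Sketch: selecting graph detail flags; "pipeline" is assumed to exist. */
GstDebugGraphDetails details = GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE     /* pad media types */
    | GST_DEBUG_GRAPH_SHOW_CAPS_DETAILS                            /* full caps of negotiated pads */
    | GST_DEBUG_GRAPH_SHOW_STATES;                                  /* element states */

GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), details, "custom-details");

/* Everything except non-default property values, as in Example 4. */
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline),
    GST_DEBUG_GRAPH_SHOW_ALL ^ GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS, "no-params");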
Example 5
gint
main (gint argc, gchar * argv[])
{
  GstBin *bin;
  GstClockTime start, end;
  GstElement *sink, *new_sink;

  /* default parameters */
  gint depth = 4;
  gint children = 3;
  gint flavour = FLAVOUR_AUDIO;
  const gchar *flavour_str = "audio";

  gst_init (&argc, &argv);

  /* check command line options */
  if (argc) {
    gint arg;
    for (arg = 0; arg < argc; arg++) {
      if (!strcmp (argv[arg], "-d")) {
        arg++;
        if (arg < argc)
          depth = atoi (argv[arg]);
      } else if (!strcmp (argv[arg], "-c")) {
        arg++;
        if (arg < argc)
          children = atoi (argv[arg]);
      } else if (!strcmp (argv[arg], "-f")) {
        arg++;
        if (arg < argc) {
          flavour_str = argv[arg];
          switch (*flavour_str) {
            case 'a':
              flavour = FLAVOUR_AUDIO;
              break;
            case 'v':
              flavour = FLAVOUR_VIDEO;
              break;
            default:
              break;
          }
        }
      }
    }
  }

  /* build pipeline */
  g_print ("building %s pipeline with depth = %d and children = %d\n",
      flavour_str, depth, children);
  start = gst_util_get_timestamp ();
  bin = GST_BIN (gst_pipeline_new ("pipeline"));
  sink = gst_element_factory_make ("fakesink", NULL);
  gst_bin_add (bin, sink);
  if (!create_node (bin, sink, "sink", &new_sink, children, flavour)) {
    goto Error;
  }
  if (!create_nodes (bin, new_sink, depth, children, flavour)) {
    goto Error;
  }
  end = gst_util_get_timestamp ();
  /* num-threads = num-sources = pow (children, depth) */
  g_print ("%" GST_TIME_FORMAT " built pipeline with %d elements\n",
      GST_TIME_ARGS (end - start), GST_BIN_NUMCHILDREN (bin));

  /* measure */
  g_print ("starting pipeline\n");
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);
  GST_DEBUG_BIN_TO_DOT_FILE (bin, GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE, "capsnego");
  start = gst_util_get_timestamp ();
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PAUSED);
  event_loop (GST_ELEMENT (bin), start);
  end = gst_util_get_timestamp ();
  g_print ("%" GST_TIME_FORMAT " reached paused\n",
      GST_TIME_ARGS (end - start));

  /* clean up */
Error:
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);
  gst_object_unref (bin);
  return 0;
}
Example 6
static gboolean 
do_switch (GstElement * pipeline)
{
  int other_channel;
  GstElement *select;
  GstElement *aselect;
  GstStateChangeReturn ret;
  gchar *name;
  gchar *othername;
  GstPad *pad;
  GstPad *apad;
  GstPad *otherPad;
  GstPad *aotherPad;
  gint64 v_stoptime, a_stoptime;
  gint64 v_starttime, a_starttime;
  gint64 v_runningtime, a_runningtime;
  gint64 starttime, stoptime;

  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "foo");


  /* find the selector */
  select = gst_bin_get_by_name (GST_BIN (pipeline), "selector");
  aselect = gst_bin_get_by_name (GST_BIN (pipeline), "aselector");

  if (!select) {
     g_print("Input selector not found\n");
  }
  if (!aselect) {
     g_print("Audio input selector not found\n");
  }

  /* get the named pads; assumes active_channel is a 0/1 global toggled elsewhere */
  other_channel = active_channel ? 0 : 1;
  name = g_strdup_printf ("sink%d", active_channel);
  othername = g_strdup_printf ("sink%d", other_channel);

  pad = gst_element_get_static_pad (select, name);
  otherPad = gst_element_get_static_pad (select, othername);
  apad = gst_element_get_static_pad (aselect, name);
  aotherPad = gst_element_get_static_pad (aselect, othername);

  if (!pad) {
     g_print("Input selector pad %s not found\n", name);
  }
  if (!apad) {
     g_print("Audio Input selector pad %s not found\n", name);
  }
  if (!otherPad) {
     g_print("Input selector pad %s not found\n", othername);
  }
  if (!aotherPad) {
     g_print("Input selector pad %s not found\n", othername);
  }

  /* set the active pad */

  g_signal_emit_by_name (select, "block", &v_stoptime);
  g_signal_emit_by_name (aselect, "block", &a_stoptime);

  if (v_stoptime > a_stoptime) {
     stoptime = v_stoptime;
  } else {
     stoptime = a_stoptime;
  }

//  Need to figure this out still

//  gst_pad_get_property (otherPad, "running-time", &v_runningtime);
//  gst_pad_get_property (aotherPad, (const char *)"running-time", &a_runningtime);
  

//  if (v_runningtime > a_runningtime) {
//     stoptime = a_runningtime;
//  } else {
//     stoptime = v_runningtime;
//  } 

  g_signal_emit_by_name (select, "switch", pad, stoptime, -1);
  g_signal_emit_by_name (aselect, "switch", apad, stoptime,-1);

  g_free (name);
  g_free (othername);

  return TRUE;
}
Example 7
static void
gve_bus_message_cb (GstBus * bus, GstMessage * message, gpointer data)
{
  GstVideoEditor *gve = (GstVideoEditor *) data;
  GstMessageType msg_type;

  g_return_if_fail (gve != NULL);
  g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));

  msg_type = GST_MESSAGE_TYPE (message);

  switch (msg_type) {
    case GST_MESSAGE_ERROR:
      gve_error_msg (gve, message);
      if (gve->priv->main_pipeline)
        gst_element_set_state (gve->priv->main_pipeline, GST_STATE_NULL);
      break;
    case GST_MESSAGE_WARNING:
      GST_WARNING ("Warning message: %" GST_PTR_FORMAT, message);
      break;

    case GST_MESSAGE_STATE_CHANGED:
    {
      GstState old_state, new_state;
      gchar *src_name;

      gst_message_parse_state_changed (message, &old_state, &new_state, NULL);

      if (old_state == new_state)
        break;

      /* we only care about playbin (pipeline) state changes */
      if (GST_MESSAGE_SRC (message) != GST_OBJECT (gve->priv->main_pipeline))
        break;

      src_name = gst_object_get_name (message->src);

      GST_INFO ("%s changed state from %s to %s", src_name,
          gst_element_state_get_name (old_state),
          gst_element_state_get_name (new_state));
      g_free (src_name);

      if (new_state == GST_STATE_PLAYING)
        gve_set_tick_timeout (gve, TIMEOUT);
      if (old_state == GST_STATE_PAUSED && new_state == GST_STATE_READY) {
        if (gve->priv->update_id > 0) {
          g_source_remove (gve->priv->update_id);
          gve->priv->update_id = 0;
        }
      }
      if (old_state == GST_STATE_NULL && new_state == GST_STATE_READY)
        GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (gve->priv->main_pipeline),
            GST_DEBUG_GRAPH_SHOW_ALL, "longomatch-editor-null-to-ready");
      if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED)
        GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (gve->priv->main_pipeline),
            GST_DEBUG_GRAPH_SHOW_ALL, "longomatch-editor-ready-to-paused");
      break;
    }
    case GST_MESSAGE_EOS:
      if (gve->priv->update_id > 0) {
        g_source_remove (gve->priv->update_id);
        gve->priv->update_id = 0;
      }
      gst_element_set_state (gve->priv->main_pipeline, GST_STATE_NULL);
      g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) 1);
      /* Close file sink properly */
      g_object_set (G_OBJECT (gve->priv->file_sink), "location", "", NULL);
      break;
    default:
      GST_LOG ("Unhandled message: %" GST_PTR_FORMAT, message);
      break;
  }
}
Example 8
static gboolean
do_switch (GstElement * pipeline)
{
  GstElement *select;
  GstElement *aselect;
  GstStateChangeReturn ret;
  gchar *name;
  GstPad *pad;
  GstPad *apad;
  gint64 v_stoptime, a_stoptime;
  gint64 starttime, stoptime;

  active_channel = active_channel ? 0 : 1;

  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "foo");


  /* find the selector */
  select = gst_bin_get_by_name (GST_BIN (pipeline), "selector");
  aselect = gst_bin_get_by_name (GST_BIN (pipeline), "aselector");

  if (!select) {
     g_print("Input selector not found\n");
  }
  if (!aselect) {
     g_print("Audio input selector not found\n");
  }

  /* get the named pad */
  name = g_strdup_printf ("sink%d", active_channel);
//  g_print ("switching to pad %s\n", name);
 
  pad = gst_element_get_static_pad (select, name);
  apad = gst_element_get_static_pad (aselect, name);

  if (!pad) {
     g_print("Input selector pad %s not found\n", name);
  }
  if (!apad) {
     g_print("Audio Input selector pad %s not found\n", name);
  }

  /* set the active pad */

  g_signal_emit_by_name (select, "block", &v_stoptime);
  g_signal_emit_by_name (aselect, "block", &a_stoptime);

  if (v_stoptime > a_stoptime) {
     stoptime = a_stoptime;
     starttime = v_stoptime;
  } else {
     stoptime = v_stoptime;
     starttime = a_stoptime;
  }
  
  g_signal_emit_by_name (select, "switch", pad, stoptime, starttime);
  g_signal_emit_by_name (aselect, "switch", apad, stoptime, starttime);

  g_free (name);



  return TRUE;
}
Example 9
BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder)
{
	const char* appsrc = "appsrc name=source ! decodebin name=decoder !";
	const char* video = "autovideoconvert ! videoscale !";
	const char* audio = "audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
	char pipeline[1024];

	if (!mdecoder)
		return FALSE;

	/* TODO: Construction of the pipeline from a string allows easy overwrite with arguments.
	 *       The only fixed elements necessary are appsrc and the volume element for audio streams.
	 *       The rest could easily be provided in gstreamer pipeline notation from command line. */
	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		snprintf(pipeline, sizeof(pipeline), "%s %s %s name=outsink", appsrc, video, tsmf_platform_get_video_sink());
	else
		snprintf(pipeline, sizeof(pipeline), "%s %s %s name=outsink", appsrc, audio, tsmf_platform_get_audio_sink());

	DEBUG_TSMF("pipeline=%s", pipeline);
	mdecoder->pipe = gst_parse_launch(pipeline, NULL);

	if (!mdecoder->pipe)
	{
		WLog_ERR(TAG, "Failed to create new pipe");
		return FALSE;
	}

	mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "source");

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "Failed to get appsrc");
		return FALSE;
	}

	mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "outsink");

	if (!mdecoder->outsink)
	{
		WLog_ERR(TAG, "Failed to get sink");
		return FALSE;
	}

	if (mdecoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
	{
		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");

		if (!mdecoder->volume)
		{
			WLog_ERR(TAG, "Failed to get volume");
			return FALSE;
		}
	}

	tsmf_platform_register_handler(mdecoder);
	/* AppSrc settings */
	GstAppSrcCallbacks callbacks =
	{
		tsmf_gstreamer_need_data,
		tsmf_gstreamer_enough_data,
		tsmf_gstreamer_seek_data
	};
	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);
	g_object_set(mdecoder->src, "is-live", TRUE, NULL);
	g_object_set(mdecoder->src, "block", TRUE, NULL);
	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);
	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);
	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);
	tsmf_window_create(mdecoder);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_READY);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	mdecoder->pipeline_start_time_valid = 0;
	mdecoder->shutdown = 0;

	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, get_type(mdecoder));

	return TRUE;
}
Example 10
static void
demux_pad_added_cb (GstElement * element, GstPad * demuxpad, App * app)
{
    GstPad *parser_sinkpad = NULL, *parser_srcpad = NULL, *queue_sinkpad = NULL, *queue_srcpad = NULL, *mux_sinkpad = NULL;
    GstStructure *s;
    GstCaps *caps = gst_pad_get_caps (demuxpad);

    gchar *demuxpadname, sinkpadname[10], srcpadname[9];
    guint sourcepid;
    int i, ret;

    s = gst_caps_get_structure (caps, 0);
    demuxpadname = gst_pad_get_name (demuxpad);
    GST_DEBUG ("demux_pad_added_cb %s:%s", GST_DEBUG_PAD_NAME(demuxpad));

    if (g_ascii_strncasecmp (demuxpadname, "video", 5) == 0) {
        sscanf (demuxpadname + 6, "%x", &sourcepid);
        if (app->auto_pids) {
            app->a_source_pids[0] = sourcepid;
            if (app->a_sink_pids[0] == -1)
            {
                app->a_sink_pids[0] = sourcepid;
                app->no_sink_pids++;
            }
            app->no_source_pids++;
        }
        if (sourcepid == app->a_source_pids[0] && app->videoparser == NULL) {
            if (gst_structure_has_name (s, "video/mpeg")) {
                app->videoparser = gst_element_factory_make ("mpegvideoparse", "videoparse");
                if (!app->videoparser) {
                    bdremux_errout("mpegvideoparse not found! please install gst-plugin-mpegvideoparse!");
                }
            }
            else if (gst_structure_has_name (s, "video/x-h264")) {
                app->videoparser = gst_element_factory_make ("h264parse", "videoparse");
                if (!app->videoparser) {
                    bdremux_errout("h264parse not found! please install gst-plugin-videoparsersbad!");
                }
            }
            gst_bin_add (GST_BIN (app->pipeline), app->videoparser);
            gst_element_set_state (app->videoparser, GST_STATE_PLAYING);
            parser_sinkpad = gst_element_get_static_pad (app->videoparser, "sink");
            parser_srcpad = gst_element_get_static_pad (app->videoparser, "src");
            g_sprintf (sinkpadname, "sink%d", app->a_sink_pids[0]);
            g_sprintf (srcpadname, "src%d", app->a_sink_pids[0]);
            queue_sinkpad = gst_element_get_request_pad (app->queue, sinkpadname);
            queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
            g_sprintf (sinkpadname, "sink_%d", app->a_sink_pids[0]);
            mux_sinkpad = gst_element_get_request_pad (app->m2tsmux, sinkpadname);
            app->requested_pid_count++;
            if (app->requested_pid_count <= app->no_source_pids)
            {
                ret = gst_pad_set_blocked_async (queue_srcpad, TRUE, (GstPadBlockCallback) pad_block_cb, app);
                GST_DEBUG ("BLOCKING %s returned %i", srcpadname, ret);
            }
            if (gst_pad_link (demuxpad, parser_sinkpad) == 0)
            {
                if (gst_pad_link (parser_srcpad, queue_sinkpad) == 0)
                {
                    if (gst_pad_link (queue_srcpad, mux_sinkpad) == 0) {
                        g_fprintf
                        (stdout, "linked: Source PID %d to %s\n",
                         app->a_source_pids[0], sinkpadname);
                        g_signal_connect (G_OBJECT (mux_sinkpad), "notify::caps", G_CALLBACK (mux_pad_has_caps_cb), app);
                        fflush(stdout);
                    } else {
                        bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(queue_srcpad), GST_DEBUG_PAD_NAME(mux_sinkpad)));
                    }
                } else {
                    bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s @%p", GST_DEBUG_PAD_NAME(parser_srcpad), GST_DEBUG_PAD_NAME(queue_sinkpad), queue_sinkpad));
                }
            } else {
                bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(demuxpad), GST_DEBUG_PAD_NAME(parser_sinkpad)));
            }
        }
    } else if (g_ascii_strncasecmp (demuxpadname, "audio", 5) == 0) {
        sscanf (demuxpadname + 6, "%x", &sourcepid);
        if (app->auto_pids)
        {
            if (app->no_source_pids == 0)
                i = 1;
            else
                i = app->no_source_pids;
            app->a_source_pids[i] = sourcepid;
            if (app->a_sink_pids[i] == -1)
            {
                app->a_sink_pids[i] = sourcepid;
                app->no_sink_pids++;
            }
            app->no_source_pids++;
        }
        for (i = 1; i < app->no_source_pids; i++) {
            if (sourcepid == app->a_source_pids[i]) {
                if (gst_structure_has_name (s, "audio/mpeg")) {
                    app->audioparsers[i] = gst_element_factory_make ("mpegaudioparse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("mpegaudioparse not found! please install gst-plugin-mpegaudioparse!");
                    }
                }
                else if (gst_structure_has_name (s, "audio/x-ac3")) {
                    app->audioparsers[i] = gst_element_factory_make ("ac3parse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("mpegaudioparse not found! please install gst-plugin-audioparses!");
                    }
                }
                else if (gst_structure_has_name (s, "audio/x-dts")) {
                    app->audioparsers[i] = gst_element_factory_make ("dcaparse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("dcaparse not found! please install gst-plugin-audioparses!");
                    }
                }
                else {
                    bdremux_errout(g_strdup_printf("could not find parser for audio stream with pid 0x%04x!", sourcepid));
                }
                gst_bin_add (GST_BIN (app->pipeline), app->audioparsers[i]);
                gst_element_set_state (app->audioparsers[i], GST_STATE_PLAYING);
                parser_sinkpad = gst_element_get_static_pad (app->audioparsers[i], "sink");
                parser_srcpad = gst_element_get_static_pad (app->audioparsers[i], "src");
                g_sprintf (sinkpadname, "sink%d", app->a_sink_pids[i]);
                g_sprintf (srcpadname, "src%d", app->a_sink_pids[i]);
                queue_sinkpad = gst_element_get_request_pad (app->queue, sinkpadname);
                queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
                g_sprintf (sinkpadname, "sink_%d", app->a_sink_pids[i]);
                mux_sinkpad = gst_element_get_request_pad (app->m2tsmux, sinkpadname);
                app->requested_pid_count++;
                if (app->requested_pid_count <= app->no_source_pids)
                {
                    ret = gst_pad_set_blocked_async (queue_srcpad, TRUE, (GstPadBlockCallback) pad_block_cb, app);
                    GST_DEBUG ("BLOCKING %s returned %i", srcpadname, ret);
                }
                if (gst_pad_link (demuxpad, parser_sinkpad) == 0
                        && gst_pad_link (parser_srcpad, queue_sinkpad) == 0
                        && gst_pad_link (queue_srcpad, mux_sinkpad) == 0) {
                    g_print
                    ("linked: Source PID %d to %s\n",
                     app->a_source_pids[i], sinkpadname);
                    g_signal_connect (G_OBJECT (mux_sinkpad), "notify::caps", G_CALLBACK (mux_pad_has_caps_cb), app);
                } else
                    bdremux_errout (g_strdup_printf("Couldn't link audio PID 0x%04x to sink PID 0x%04x",
                                                    app->a_source_pids[i], app->a_sink_pids[i]));
                break;
            }
        }
    } else
        GST_INFO ("Ignoring pad %s!", demuxpadname);

    if (parser_sinkpad)
        gst_object_unref (parser_sinkpad);
    if (parser_srcpad)
        gst_object_unref (parser_srcpad);
    if (queue_sinkpad)
        gst_object_unref (queue_sinkpad);
    if (queue_srcpad)
        gst_object_unref (queue_srcpad);
    if (mux_sinkpad)
        gst_object_unref (mux_sinkpad);
    if (caps)
        gst_caps_unref (caps);

//   g_print("app->requested_pid_count = %i, app->no_source_pids = %i\n", app->requested_pid_count, app->no_source_pids);
    if (!app->auto_pids && app->requested_pid_count == app->no_source_pids)
    {
        GST_INFO("All %i source PIDs have been linked to the mux -> UNBLOCKING all pads and start muxing", app->requested_pid_count);
        for (i = 0; i < app->no_sink_pids; i++)
        {
            g_sprintf (srcpadname, "src%d", app->a_sink_pids[i]);
            queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
            ret = gst_pad_set_blocked_async (queue_srcpad, FALSE, (GstPadBlockCallback) pad_block_cb, app);
            GST_DEBUG ("UNBLOCKING %s returned %i", srcpadname, ret);
        }
    }

    g_free (demuxpadname);
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(app->pipeline),GST_DEBUG_GRAPH_SHOW_ALL,"bdremux_pipelinegraph_pad_added");
}
Example 11
static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
    gchar *sourceName;
    GstObject *source;
    gchar *string;
    GstState current_state;

    if (!message)
        return FALSE;
    source = GST_MESSAGE_SRC (message);
    if (!GST_IS_OBJECT (source))
        return FALSE;
    sourceName = gst_object_get_name (source);

    if (gst_message_get_structure (message))
        string = gst_structure_to_string (gst_message_get_structure (message));
    else
        string = g_strdup (GST_MESSAGE_TYPE_NAME (message));
    GST_DEBUG("gst_message from %s: %s", sourceName, string);
    g_free (string);

    switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
    {
        GError *gerror;
        gchar *debug;

        gst_message_parse_error (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        g_error_free (gerror);
        g_free (debug);

        g_main_loop_quit (app->loop);
        break;
    }
    case GST_MESSAGE_WARNING:
    {
        GError *gerror;
        gchar *debug;

        gst_message_parse_warning (message, &gerror, &debug);
        gst_object_default_error (GST_MESSAGE_SRC (message), gerror, debug);
        g_error_free (gerror);
        g_free (debug);

//       g_main_loop_quit (app->loop);
        break;
    }
    case GST_MESSAGE_EOS:
        g_message ("received EOS");
        g_main_loop_quit (app->loop);
        break;
    case GST_MESSAGE_ASYNC_DONE:
        break;
    case GST_MESSAGE_ELEMENT:
    {
        const GstStructure *msgstruct = gst_message_get_structure (message);
        if (msgstruct) {
            const gchar *eventname = gst_structure_get_name (msgstruct);
            if (!strcmp (eventname, "seekable"))
                app->is_seekable = TRUE;
        }
        break;
    }
    case GST_MESSAGE_STATE_CHANGED:
    {
        GstState old_state, new_state;
        GstStateChange transition;
        if (GST_MESSAGE_SRC (message) != GST_OBJECT (app->tsdemux))
            break;

        gst_message_parse_state_changed (message, &old_state, &new_state, NULL);
        transition = (GstStateChange) GST_STATE_TRANSITION (old_state, new_state);

        switch (transition) {
        case GST_STATE_CHANGE_NULL_TO_READY:
            break;
        case GST_STATE_CHANGE_READY_TO_PAUSED:
            break;
        case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
        {

        }
        break;
        case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
            break;
        case GST_STATE_CHANGE_PAUSED_TO_READY:
            break;
        case GST_STATE_CHANGE_READY_TO_NULL:
            break;
        }
        break;
    }
    case GST_MESSAGE_SEGMENT_DONE:
    {
        GST_DEBUG ("GST_MESSAGE_SEGMENT_DONE!!!");
        do_seek (app);
        break;
    }
    default:
        break;
    }
    gst_element_get_state (app->pipeline, &current_state, NULL, 0);
    if (app->current_segment == 0 && app->segment_count /*&& app->is_seekable*/
            && current_state == GST_STATE_PLAYING)
        do_seek (app);
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(app->pipeline),GST_DEBUG_GRAPH_SHOW_ALL,"bdremux_pipelinegraph_message");
    return TRUE;
}
Example 12
static VALUE
rb_gst_bin_to_dot_file(VALUE self, VALUE details, VALUE filename)
{
    GST_DEBUG_BIN_TO_DOT_FILE(SELF(self), NUM2INT(details), RVAL2CSTR(filename));
    return Qnil;
}
Example 13
File: m1.cpp Project: jhgorse/mog
///////////////////////////////////////////////////////////////////////////////////////////////////
/// main()
///
/// The main function. Creates the pipeline and makes it go.
///////////////////////////////////////////////////////////////////////////////////////////////////
int main(int argc, char *argv[])
{
	// Initialize GStreamer
	gst_init (&argc, &argv);
	
	// Parse the target hostname/IP
	if (argc != 2)
	{
		g_printerr("Usage: %s [host or ip]\n", argv[0]);
		return -1;
	}
	const gchar* target = argv[1];
	g_assert(target != NULL);

	// Parse the pipeline from the string above
	pipeline = gst_parse_launch(PIPELINE_STRING, NULL);
	if (pipeline == NULL)
	{
		g_printerr("Failed to create pipeline!\n");
		return -1;
	}
	
	// Set the clients property of the UDP sink elements
	GstElement* element = gst_bin_get_by_name(GST_BIN(pipeline), "vsink");
	g_assert(element != NULL);
	const gchar* clients_value = g_strdup_printf("%s:10000", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "vcsink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10001", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "asink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10002", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "acsink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10003", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);

	// Create a pipeline tracer for latency / jitter information
	PipelineTracer* pTracer = new PipelineTracer(pipeline);

	// Put the pipeline in the playing state
	GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE)
	{
		g_printerr("Unable to set the pipeline to the playing state.\n");
		gst_object_unref(pipeline);
		return -1;
	}
	
	// Dump to a dot file (if GST_DEBUG_DUMP_DOT_DIR is set) at ${GST_DEBUG_DUMP_DOT_DIR}/<base name>.dot; here the base name is argv[0].
	// We wait until the pipeline is playing to make sure pads are linked.
	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, argv[0]);
	
	// Assign the SIGINT handler to send EOS
	struct sigaction sigact;
	sigact.sa_handler = on_sig_int;
	sigemptyset(&sigact.sa_mask);
	sigact.sa_flags = 0;
	sigaction(SIGINT, &sigact, NULL);
	g_print("Playing... press Ctrl-C to terminate.\n");
  
	// Wait until error or EOS
	GstBus* bus = gst_element_get_bus(pipeline);
	GstMessage* msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
   
	// Parse message and print stuff about it.
	if (msg != NULL)
	{
		GError *err;
		gchar *debug_info;
		
		switch (GST_MESSAGE_TYPE(msg))
		{
			case GST_MESSAGE_ERROR:
				gst_message_parse_error(msg, &err, &debug_info);
				g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
				g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
				g_clear_error(&err);
				g_free(debug_info);
				break;
				
			case GST_MESSAGE_EOS:
				g_print("End-Of-Stream reached.\n");
				break;
				
			default:
				// We should not reach here because we only asked for ERRORs and EOS
				g_printerr("Unexpected message received.\n");
				break;
		} // END switch(message type)
		gst_message_unref(msg);
	} // END if (message)

	// Free resources
	delete pTracer;
	gst_object_unref(bus);
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);
	
	return 0;
} // END main()
Example 14
BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder)
{
#if GST_VERSION_MAJOR > 0
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin name=videodecoder !";
        const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#else
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin2 name=videodecoder !";
	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin2 name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#endif
	char pipeline[1024];

	if (!mdecoder)
		return FALSE;

	/* TODO: Construction of the pipeline from a string allows easy overwrite with arguments.
	 *       The only fixed elements necessary are appsrc and the volume element for audio streams.
	 *       The rest could easily be provided in gstreamer pipeline notation from command line. */
	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=videosink", video, tsmf_platform_get_video_sink());
	else
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=audiosink", audio, tsmf_platform_get_audio_sink());

	DEBUG_TSMF("pipeline=%s", pipeline);
	mdecoder->pipe = gst_parse_launch(pipeline, NULL);

	if (!mdecoder->pipe)
	{
		WLog_ERR(TAG, "Failed to create new pipe");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosource");
	else
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosource");

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "Failed to get appsrc");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videoqueue");
	else
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audioqueue");

	if (!mdecoder->queue)
	{
		WLog_ERR(TAG, "Failed to get queue");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosink");
	else
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosink");

	if (!mdecoder->outsink)
	{
		WLog_ERR(TAG, "Failed to get sink");
		return FALSE;
	}

	g_signal_connect(mdecoder->outsink, "child-added", G_CALLBACK(cb_child_added), mdecoder);

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_AUDIO)
	{
		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");

		if (!mdecoder->volume)
		{
			WLog_ERR(TAG, "Failed to get volume");
			return FALSE;
		}

		tsmf_gstreamer_change_volume((ITSMFDecoder*)mdecoder, mdecoder->gstVolume*((double) 10000), mdecoder->gstMuted);
	}

	tsmf_platform_register_handler(mdecoder);
	/* AppSrc settings */
	GstAppSrcCallbacks callbacks =
	{
		tsmf_gstreamer_need_data,
		tsmf_gstreamer_enough_data,
		tsmf_gstreamer_seek_data
	};
	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);
	g_object_set(mdecoder->src, "is-live", FALSE, NULL);
	g_object_set(mdecoder->src, "block", FALSE, NULL);
	g_object_set(mdecoder->src, "blocksize", 1024, NULL);
	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);
	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);
	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);
	gst_app_src_set_latency((GstAppSrc *) mdecoder->src, 0, -1);
	gst_app_src_set_max_bytes((GstAppSrc *) mdecoder->src, (guint64) 0);//unlimited
	g_object_set(G_OBJECT(mdecoder->queue), "use-buffering", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "use-rate-estimate", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-buffers", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-bytes", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-time", (guint64) 0, NULL);

	/* Only set these properties if not an autosink, otherwise we will set properties when real sinks are added */
	if (g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoVideoSink") != 0 && g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoAudioSink") != 0)
	{
		if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
		}
		else
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "buffer-time", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "drift-tolerance", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "latency-time", (gint64) 10000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "slave-method", 1, NULL);
		}
		g_object_set(G_OBJECT(mdecoder->outsink), "sync", TRUE, NULL); /* synchronize on the clock */
		g_object_set(G_OBJECT(mdecoder->outsink), "async", TRUE, NULL); /* no async state changes */
	}

	tsmf_window_create(mdecoder);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_READY);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	mdecoder->pipeline_start_time_valid = 0;
	mdecoder->shutdown = 0;
	mdecoder->paused = FALSE;

	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, get_type(mdecoder));

	return TRUE;
}
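When the same base name is used for repeated dumps (for instance from a bus callback, as in Examples 7 and 11), each call overwrites the previous .dot file. GStreamer also provides GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS, which prefixes the file name with the pipeline's elapsed running time so successive dumps are kept side by side. A one-line sketch reusing the pipeline handle from the last example:

/* Sketch: keep one .dot file per dump by letting GStreamer prefix a timestamp. */
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, "tsmf-pipeline");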