コード例 #1
0
ファイル: util.c プロジェクト: rodrimc/libmicromb
/* Adds and links the video branch (videoscale ! capsfilter) for @media inside
 * @bin, connects it to @decoder_src_pad and to a freshly requested sink pad
 * of the global video mixer.  Image URIs get an imagefreeze element in front
 * so a still picture behaves like a continuous video stream.
 *
 * Returns TRUE on success, FALSE when an element or pad link fails. */
gboolean
set_video_bin(GstElement *bin, MbMedia *media, GstPad *decoder_src_pad)
{
  GstElement *sink_element = NULL;
  GstCaps *caps = NULL;
  GstPad *sink_pad = NULL, *filter_src_pad = NULL, *ghost_pad = NULL,
         *output_sink_pad = NULL;
  GstPadLinkReturn ret;
  gchar *uri = NULL;
  gboolean is_image = FALSE;
  int return_code = TRUE;

  g_assert (media->video_scaler);
  g_assert (media->video_filter);

  gst_element_set_state (media->video_scaler, GST_STATE_PAUSED);
  gst_element_set_state (media->video_filter, GST_STATE_PAUSED);

  /* Force the decoded video to the media's target size, without borders. */
  caps = gst_caps_new_simple ("video/x-raw",
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "width", G_TYPE_INT, media->width,
      "height", G_TYPE_INT, media->height,
      NULL);

  g_object_set (G_OBJECT (media->video_scaler), "add-borders", 0, NULL);
  g_object_set (G_OBJECT (media->video_filter), "caps", caps, NULL);

  gst_bin_add_many (GST_BIN (bin), media->video_scaler, media->video_filter,
      NULL);
  if (!gst_element_link (media->video_scaler, media->video_filter))
  {
    g_debug ("Could not link elements together.\n");
    /* BUG FIX: the bin owns the elements once added; remove them (which
     * drops the bin's reference) instead of unreffing them directly,
     * otherwise the bin would later dispose already-released children. */
    gst_bin_remove (GST_BIN (bin), media->video_scaler);
    gst_bin_remove (GST_BIN (bin), media->video_filter);
    gst_caps_unref (caps);  /* was leaked on this path before */
    return FALSE;
  }

  sink_element = media->video_scaler;

  g_object_get (G_OBJECT (media->decoder), "uri", &uri, NULL);
  is_image = has_image_extension (uri);
  g_free (uri);

  if (is_image)
  {
    /* imagefreeze repeats the single decoded frame forever. */
    media->image_freezer = gst_element_factory_make ("imagefreeze", NULL);
    g_assert (media->image_freezer);

    gst_bin_add (GST_BIN (bin), media->image_freezer);

    if (!gst_element_link (media->image_freezer, media->video_scaler))
    {
      g_debug ("Could not link image element.\n");
      gst_bin_remove (GST_BIN (bin), media->image_freezer);
      media->image_freezer = NULL;
      gst_caps_unref (caps);
      return FALSE;
    }

    gst_element_set_state (media->image_freezer, GST_STATE_PAUSED);
    sink_element = media->image_freezer;
  }

  /* Feed the decoder output into the head of the branch. */
  sink_pad = gst_element_get_static_pad (sink_element, "sink");
  g_assert (sink_pad);
  ret = gst_pad_link (decoder_src_pad, sink_pad);

  if (GST_PAD_LINK_FAILED (ret))
    g_debug (" Link failed.\n");
  else
    g_debug (" Link succeeded.\n");

  /* Expose the branch output on the bin.  NOTE(review): the ghost pad name
   * "v_src" is fixed, so two video branches in the same parent bin would
   * collide -- confirm each media lives in its own bin. */
  filter_src_pad = gst_element_get_static_pad (media->video_filter, "src");
  ghost_pad = gst_ghost_pad_new ("v_src", filter_src_pad);
  /* BUG FIX: the static pad ref was leaked before; the ghost pad holds
   * its own reference to the target. */
  gst_object_unref (filter_src_pad);

  gst_pad_set_active (ghost_pad, TRUE);
  gst_element_add_pad (bin, ghost_pad);

  output_sink_pad = gst_element_get_request_pad (_mb_global_data.video_mixer,
      "sink_%u");
  g_assert (output_sink_pad);

  media->video_pad_name = gst_pad_get_name (output_sink_pad);
  g_debug ("videomixer: new pad requested (%s)\n", media->video_pad_name);

  ret = gst_pad_link (ghost_pad, output_sink_pad);
  if (GST_PAD_LINK_FAILED (ret))
  {
    return_code = FALSE;
    g_debug (" Could not link %s and videomixer together\n", media->name);
  }
  else
  {
    /* Compositing properties live on the mixer's request pad. */
    g_object_set (output_sink_pad, "xpos", media->x_pos, NULL);
    g_object_set (output_sink_pad, "ypos", media->y_pos, NULL);
    g_object_set (output_sink_pad, "zorder", media->z_index, NULL);
    g_object_set (output_sink_pad, "alpha", media->alpha, NULL);

    g_debug (" Link succeeded between %s and videomixer.\n", media->name);
  }

  if (is_image)
    gst_element_set_state (media->image_freezer, GST_STATE_PLAYING);

  gst_element_set_state (media->video_scaler, GST_STATE_PLAYING);
  gst_element_set_state (media->video_filter, GST_STATE_PLAYING);

  gst_caps_unref (caps);
  gst_object_unref (output_sink_pad);
  gst_object_unref (sink_pad);

  return return_code;
}
コード例 #2
0
ファイル: util.c プロジェクト: rodrimc/libmicromb
/* Adds a permanent black-video / silent-audio background branch to the
 * global pipeline, so the video and audio mixers always have at least one
 * live input stream. */
static void
set_background ()
{
  GstElement *bg_src = NULL, *bg_scaler = NULL, *bg_capsfilter = NULL,
             *audio_src = NULL, *audio_conv = NULL,
             *audio_resample = NULL, *audio_capsfilter = NULL;
  GstCaps *caps;

  /* black background */
  bg_src = gst_element_factory_make ("videotestsrc", NULL);
  g_assert (bg_src);

  bg_scaler = gst_element_factory_make ("videoscale", NULL);
  g_assert (bg_scaler);

  bg_capsfilter = gst_element_factory_make ("capsfilter", NULL);
  g_assert (bg_capsfilter);

  g_object_set (G_OBJECT (bg_src), "pattern", /*black*/ 2, NULL);

  /* Fixed-rate full-window caps for the background video. */
  caps = gst_caps_new_simple (
      "video/x-raw",
      "framerate", GST_TYPE_FRACTION, 25, 1,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "width", G_TYPE_INT, _mb_global_data.window_width,
      "height", G_TYPE_INT, _mb_global_data.window_height,
      NULL);
  g_assert (caps);

  g_object_set (G_OBJECT (bg_scaler), "add-borders", 0, NULL);
  g_object_set (G_OBJECT (bg_capsfilter), "caps", caps, NULL);

  gst_caps_unref (caps);

  /* silence audio */
  audio_src = gst_element_factory_make ("audiotestsrc", NULL);
  g_assert (audio_src);

  audio_conv = gst_element_factory_make ("audioconvert", NULL);
  g_assert (audio_conv);  /* BUG FIX: previously re-asserted audio_src */

  audio_resample = gst_element_factory_make ("audioresample", NULL);
  g_assert (audio_resample);

  audio_capsfilter = gst_element_factory_make ("capsfilter", NULL);
  g_assert (audio_capsfilter);

  g_object_set (G_OBJECT (audio_src), "wave", /*silence*/ 4, NULL);

  caps = gst_caps_from_string (audio_caps);
  g_assert (caps);
  g_object_set (audio_capsfilter, "caps", caps, NULL);

  gst_caps_unref (caps);

  gst_bin_add_many (GST_BIN (_mb_global_data.pipeline), bg_src, bg_scaler,
      bg_capsfilter, audio_src, audio_conv, audio_resample,
      audio_capsfilter, NULL);

  if (!gst_element_link_many (bg_src, bg_scaler, bg_capsfilter, NULL) ||
      !gst_element_link_many (audio_src, audio_conv, audio_resample,
        audio_capsfilter, NULL))
  {
    g_debug ("Could not link background elements together.\n");
    mb_clean_up ();
  }
  else
  {
    GstPad *src_pad = NULL, *mixer_sink_pad = NULL;
    GstPadLinkReturn ret;

    /* video: hook the background into the video mixer */
    src_pad = gst_element_get_static_pad (bg_capsfilter, "src");
    g_assert (src_pad);

    mixer_sink_pad = gst_element_get_request_pad (_mb_global_data.video_mixer,
        "sink_%u");
    g_assert (mixer_sink_pad);

    ret = gst_pad_link (src_pad, mixer_sink_pad);
    if (GST_PAD_LINK_FAILED (ret))
      g_debug (" Background video link failed.\n");
    else
      g_debug (" Background video link succeeded.\n");

    gst_object_unref (src_pad);
    gst_object_unref (mixer_sink_pad);
    src_pad = NULL;
    mixer_sink_pad = NULL;

    /* audio: hook the silence source into the audio mixer */
    src_pad = gst_element_get_static_pad (audio_capsfilter, "src");
    g_assert (src_pad);

    mixer_sink_pad = gst_element_get_request_pad (_mb_global_data.audio_mixer,
        "sink_%u");
    g_assert (mixer_sink_pad);

    ret = gst_pad_link (src_pad, mixer_sink_pad);
    if (GST_PAD_LINK_FAILED (ret))
      g_debug (" Background audio link failed.\n");
    else
      g_debug (" Background audio link succeeded.\n");

    /* Removed dead code: the original queried the pad caps for the sample
     * "rate" but never used the result. */

    gst_object_unref (src_pad);
    gst_object_unref (mixer_sink_pad);
  }
}
コード例 #3
0
ファイル: util.c プロジェクト: rodrimc/libmicromb
/* Initializes the global pipeline: creates the video/audio mixers and
 * sinks, links them, installs the bus watch, adds the background branch
 * and sets the pipeline to PLAYING.  When @sync is TRUE, blocks until the
 * PLAYING state is reached and keeps the pipeline clock as clock provider.
 *
 * Returns TRUE on success, FALSE otherwise (after cleaning up). */
int
init (int width, int height, gboolean sync)
{
  GstStateChangeReturn ret;
  GstState current_state;

  if (!gst_init_check (NULL, NULL, NULL))
  {
    g_debug ("Failed to initialize gstreamer...\n");
    return FALSE;
  }

  _mb_global_data.pipeline = gst_pipeline_new ("pipeline");
  g_assert (_mb_global_data.pipeline);

  g_mutex_init (&(_mb_global_data.mutex));

  _mb_global_data.video_mixer     = NULL;
  _mb_global_data.audio_mixer     = NULL;
  _mb_global_data.video_sink      = NULL;
  _mb_global_data.audio_sink      = NULL;
  _mb_global_data.clock_provider  = NULL;
  _mb_global_data.loop            = NULL;
  _mb_global_data.bus             = NULL;
  _mb_global_data.window_width    = width;
  _mb_global_data.window_height   = height;
  _mb_global_data.initialized     = FALSE;
  _mb_global_data.sync            = sync;

  /* BUG FIX: a NULL key-equal function compares string keys by pointer
   * while g_str_hash hashes by content, so lookups with equal-but-distinct
   * strings would fail.  g_str_hash must be paired with g_str_equal. */
  _mb_global_data.media_table = g_hash_table_new (g_str_hash, g_str_equal);

  /* video */
  _mb_global_data.video_mixer = gst_element_factory_make ("videomixer",
      "video_mixer");
  g_assert (_mb_global_data.video_mixer);

  _mb_global_data.video_sink = gst_element_factory_make ("xvimagesink",
      "video_sink");
  g_assert (_mb_global_data.video_sink);

  /* audio */
  _mb_global_data.audio_mixer = gst_element_factory_make ("adder",
      "audio_mixer");
  g_assert (_mb_global_data.audio_mixer);

  _mb_global_data.audio_sink = gst_element_factory_make ("autoaudiosink",
      "audio_sink");
  g_assert (_mb_global_data.audio_sink);

  gst_bin_add_many (GST_BIN (_mb_global_data.pipeline),
      _mb_global_data.video_mixer, _mb_global_data.video_sink,
      _mb_global_data.audio_mixer, _mb_global_data.audio_sink,
      NULL);

  if (!gst_element_link (_mb_global_data.video_mixer,
        _mb_global_data.video_sink))
  {
    g_debug ("Could not link video mixer and sink together.\n");
    mb_clean_up ();
    return FALSE;
  }

  if (!gst_element_link (_mb_global_data.audio_mixer,
        _mb_global_data.audio_sink))
  {
    g_debug ("Could not link audio mixer and sink together.\n");
    mb_clean_up ();
    return FALSE;
  }

  gst_bus_add_watch (mb_get_message_bus (), bus_cb, NULL);

  set_background ();

  gst_element_set_state (_mb_global_data.pipeline, GST_STATE_PLAYING);

  if (_mb_global_data.sync == TRUE)
  {
    /* Block until the pipeline actually reaches PLAYING, then grab its
     * clock so other pipelines can slave to it. */
    do
    {
      ret = gst_element_get_state (_mb_global_data.pipeline, &current_state,
          NULL, GST_CLOCK_TIME_NONE);
      if (ret == GST_STATE_CHANGE_FAILURE)
      {
        mb_clean_up ();
        return FALSE;
      }
    } while (current_state != GST_STATE_PLAYING);
    _mb_global_data.clock_provider = gst_element_get_clock (
        _mb_global_data.pipeline);
  }

  /* NOTE(review): g_thread_new() aborts on failure rather than returning
   * NULL, so this check is effectively dead; kept for defensive parity. */
  if ((_mb_global_data.loop_thread = g_thread_new ("bus_thread",
          (GThreadFunc) main_loop_thread,
          NULL)) == NULL)
  {
    g_debug ("Could not create message handler thread.\n");
    mb_clean_up ();
    return FALSE;
  }

  return TRUE;
}
コード例 #4
0
ファイル: ats_tree.c プロジェクト: Freyr666/ats-analyzer
/* Builds the base processing tree for one stream:
 *   udpsrc ! tsparse ! tee ! fakesink
 * and requests a spare tee src pad (faketee.pad) for later branches.
 *
 * Returns a newly allocated ATS_TREE, or NULL with @error set. */
ATS_TREE*
ats_tree_new(guint    stream_id,
             gchar*   ip,
             guint    port,
             GError** error)
{
  ATS_TREE       *rval;
  GstElement     *parse, *fakesink;
  GstPadTemplate *tee_src_pad_template;
  GstPad         *teepad, *sinkpad;

  /* init-ing tree */
  rval              = g_new(ATS_TREE, 1);
  rval->pipeline    = NULL;
  rval->source      = NULL;
  parse             = NULL;
  fakesink          = NULL;
  rval->faketee.tee = NULL;
  rval->faketee.pad = NULL;
  rval->branches    = NULL;

  /* creating elements */
  if ((rval->pipeline = gst_pipeline_new("proc-tree-pipe")) == NULL)
    goto error;

  if ((rval->source = gst_element_factory_make("udpsrc", "proc-tree-source")) == NULL)
    goto error;

  if ((parse = gst_element_factory_make("tsparse", "proc-tree-parse")) == NULL)
    goto error;

  if ((rval->faketee.tee = gst_element_factory_make("tee", "proc-tree-tee")) == NULL)
    goto error;

  if ((fakesink = gst_element_factory_make("fakesink", NULL)) == NULL)
    goto error;

  /* init-ing tree metadata */
  rval->metadata = ats_metadata_new(stream_id);

  /* "timeout" is a guint64 property: make the vararg's width explicit so
   * g_object_set reads a well-defined 64-bit value on every platform. */
  g_object_set (G_OBJECT (rval->source),
                "timeout",     G_GUINT64_CONSTANT (5000000000),
                "buffer-size", 2147483647,
                "port",        port,
                "address",     ip,
                NULL);

  g_object_set(G_OBJECT(parse),
               "parse-private-sections", TRUE,
               NULL);

  /* linking pipeline */
  gst_bin_add_many(GST_BIN(rval->pipeline),
                   rval->source,
                   parse,
                   rval->faketee.tee,
                   fakesink, NULL);

  /* BUG FIX: the link result was silently ignored before. */
  if (!gst_element_link_many (rval->source, parse, rval->faketee.tee, NULL)) {
    /* The pipeline owns all elements now; unreffing it releases them.
     * NOTE(review): rval->metadata leaks here -- free it once the
     * metadata destructor API is confirmed. */
    gst_object_unref(rval->pipeline);
    g_free(rval);
    g_set_error(error,
                G_ERR_UNKNOWN, -1,
                "Error: failed to create tree in ats_tree_new");
    return NULL;
  }

  /* connecting tee src to fakesink so data always flows */
  sinkpad = gst_element_get_static_pad(fakesink, "sink");
  /* Pad templates are owned by the element class (transfer none):
   * do NOT unref them -- the original over-unreffed here. */
  tee_src_pad_template =
    gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(rval->faketee.tee),
                                       "src_%u");
  teepad = gst_element_request_pad(rval->faketee.tee,
                                   tee_src_pad_template,
                                   NULL, NULL);
  gst_pad_link(teepad, sinkpad);
  gst_object_unref(sinkpad);   /* BUG FIX: this ref was leaked before */
  gst_object_unref(teepad);

  /* creating additional tee src pad for other branches */
  tee_src_pad_template =
    gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(rval->faketee.tee),
                                       "src_%u");
  rval->faketee.pad = gst_element_request_pad(rval->faketee.tee,
                                              tee_src_pad_template,
                                              NULL, NULL);

  return rval;

 error:
  /* goto error only happens before gst_bin_add_many, so the elements are
   * still floating / individually owned and must be released one by one. */
  if (rval->pipeline)    gst_object_unref(rval->pipeline);
  if (rval->source)      gst_object_unref(rval->source);
  if (parse)             gst_object_unref(parse);
  if (rval->faketee.tee) gst_object_unref(rval->faketee.tee);
  if (fakesink)          gst_object_unref(fakesink);
  g_free(rval);          /* g_free(NULL) is a no-op; no guard needed */
  g_set_error(error,
              G_ERR_UNKNOWN, -1,
              "Error: failed to create tree in ats_tree_new");
  return NULL;
}
コード例 #5
0
/* videocrop test driver: builds
 *   videotestsrc ! capsfilter ! videocrop ! videoscale ! capsfilter !
 *   (ffmpegcolorspace|identity) ! videosink
 * and runs test_with_caps() for every candidate format, optionally
 * narrowed by --caps. */
int
main (int argc, char **argv)
{
  static const GOptionEntry test_goptions[] = {
    {"videosink", '\0', 0, G_OPTION_ARG_STRING, &opt_videosink_str,
        "videosink to use (default: " DEFAULT_VIDEOSINK ")", NULL},
    {"caps", '\0', 0, G_OPTION_ARG_STRING, &opt_filtercaps_str,
        "filter caps to narrow down formats to test", NULL},
    {"with-ffmpegcolorspace", '\0', 0, G_OPTION_ARG_NONE,
          &opt_with_ffmpegcolorspace,
          "whether to add an ffmpegcolorspace element in front of the sink",
        NULL},
    {NULL, '\0', 0, 0, NULL, NULL, NULL}
  };
  GOptionContext *ctx;
  GError *opt_err = NULL;

  GstElement *pipeline, *src, *filter1, *crop, *scale, *filter2, *csp, *sink;
  GstCaps *filter_caps = NULL;
  GList *caps_list, *l;

#if !GLIB_CHECK_VERSION (2, 31, 0)
  if (!g_thread_supported ())
    g_thread_init (NULL);
#endif

  /* command line option parsing */
  ctx = g_option_context_new ("");
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  g_option_context_add_main_entries (ctx, test_goptions, NULL);

  if (!g_option_context_parse (ctx, &argc, &argv, &opt_err)) {
    g_error ("Error parsing command line options: %s", opt_err->message);
    return -1;
  }
  g_option_context_free (ctx);  /* BUG FIX: context was leaked before */

  GST_DEBUG_CATEGORY_INIT (videocrop_test_debug, "videocroptest", 0, "vctest");

  pipeline = gst_pipeline_new ("pipeline");
  src = gst_element_factory_make ("videotestsrc", "videotestsrc");
  g_assert (src != NULL);
  filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
  g_assert (filter1 != NULL);
  crop = gst_element_factory_make ("videocrop", "videocrop");
  g_assert (crop != NULL);
  scale = gst_element_factory_make ("videoscale", "videoscale");
  g_assert (scale != NULL);
  filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  g_assert (filter2 != NULL);

  if (opt_with_ffmpegcolorspace) {
    g_print ("Adding ffmpegcolorspace\n");
    csp = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
  } else {
    /* identity keeps the topology identical without converting */
    csp = gst_element_factory_make ("identity", "colorspace");
  }
  g_assert (csp != NULL);

  if (opt_filtercaps_str) {
    filter_caps = gst_caps_from_string (opt_filtercaps_str);
    if (filter_caps == NULL) {
      g_error ("Invalid filter caps string '%s'", opt_filtercaps_str);
    } else {
      g_print ("Using filter caps '%s'\n", opt_filtercaps_str);
    }
  }

  /* Pick a sink: command-line choice first, then fallbacks. */
  if (opt_videosink_str) {
    g_print ("Trying videosink '%s' ...", opt_videosink_str);
    sink = gst_element_factory_make (opt_videosink_str, "sink");
    g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
  } else {
    sink = NULL;
  }

  if (sink == NULL) {
    g_print ("Trying videosink '%s' ...", DEFAULT_VIDEOSINK);
    sink = gst_element_factory_make (DEFAULT_VIDEOSINK, "sink");
    g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
  }
  if (sink == NULL) {
    g_print ("Trying videosink '%s' ...", "xvimagesink");
    sink = gst_element_factory_make ("xvimagesink", "sink");
    g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
  }
  if (sink == NULL) {
    g_print ("Trying videosink '%s' ...", "ximagesink");
    sink = gst_element_factory_make ("ximagesink", "sink");
    g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
  }

  g_assert (sink != NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, filter1, crop, scale, filter2,
      csp, sink, NULL);

  if (!gst_element_link (src, filter1))
    g_error ("Failed to link videotestsrc to capsfilter1");

  if (!gst_element_link (filter1, crop))
    g_error ("Failed to link capsfilter1 to videocrop");

  if (!gst_element_link (crop, scale))
    g_error ("Failed to link videocrop to videoscale");

  if (!gst_element_link (scale, filter2))
    g_error ("Failed to link videoscale to capsfilter2");

  if (!gst_element_link (filter2, csp))
    g_error ("Failed to link capsfilter2 to ffmpegcolorspace");

  if (!gst_element_link (csp, sink))
    g_error ("Failed to link ffmpegcolorspace to video sink");

  caps_list = video_crop_get_test_caps (crop);
  for (l = caps_list; l != NULL; l = l->next) {
    GstStateChangeReturn ret;
    GstCaps *caps, *out_caps;
    gboolean skip = FALSE;
    gchar *s;

    if (filter_caps) {
      GstCaps *icaps;

      icaps = gst_caps_intersect (filter_caps, GST_CAPS (l->data));
      skip = gst_caps_is_empty (icaps);
      gst_caps_unref (icaps);
    }

    /* this is the size of our window (stays fixed) */
    out_caps = gst_caps_copy (GST_CAPS (l->data));
    gst_structure_set (gst_caps_get_structure (out_caps, 0), "width",
        G_TYPE_INT, OUT_WIDTH, "height", G_TYPE_INT, OUT_HEIGHT, NULL);

    g_object_set (filter2, "caps", out_caps, NULL);

    /* filter1 gets these too to prevent videotestsrc from renegotiating */
    g_object_set (filter1, "caps", out_caps, NULL);
    gst_caps_unref (out_caps);

    caps = gst_caps_copy (GST_CAPS (l->data));
    GST_INFO ("testing format: %" GST_PTR_FORMAT, caps);

    s = gst_caps_to_string (caps);

    if (skip) {
      g_print ("Skipping format: %s\n", s);
      g_free (s);
      gst_caps_unref (caps);  /* BUG FIX: caps leaked on the skip path */
      continue;
    }

    g_print ("Format: %s\n", s);

    caps = gst_caps_make_writable (caps);

    /* FIXME: check return values */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret != GST_STATE_CHANGE_FAILURE) {
      ret = gst_element_get_state (pipeline, NULL, NULL, -1);

      if (ret != GST_STATE_CHANGE_FAILURE) {
        test_with_caps (src, crop, caps);
      } else {
        g_print ("Format: %s not supported (failed to go to PLAYING)\n", s);
      }
    } else {
      g_print ("Format: %s not supported\n", s);
    }

    gst_element_set_state (pipeline, GST_STATE_NULL);

    gst_caps_unref (caps);
    g_free (s);
  }

  g_list_foreach (caps_list, (GFunc) gst_caps_unref, NULL);
  g_list_free (caps_list);

  if (filter_caps)
    gst_caps_unref (filter_caps);  /* BUG FIX: was leaked before */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
コード例 #6
0
ファイル: mdh.c プロジェクト: lovesnow/linnya
/**
 * ly_mdh_new_with_uri_full:
 * @uri: a "file://" URI naming an existing local media file.
 *
 * Creates a newly allocated LyMdhMetadata from @uri by running a
 * urisrc ! decodebin ! fakesink pipeline up to PAUSED and harvesting
 * tag/duration information from the bus.  Fills all metadata fields.
 *
 * Returns: a newly allocated metadata struct, or NULL on failure
 * (unsupported protocol, missing file, or pipeline state failure).
 */
LyMdhMetadata*	ly_mdh_new_with_uri_full	(char *uri)
{
	/*
	 * test file: only local, existing files are supported
	 */
	char *prefix=ly_gla_uri_get_prefix(uri);
	char *path=ly_gla_uri_get_path(uri);
	if(!g_str_equal(prefix, "file://"))
	{
		g_free(prefix);
		g_free(path);	/* BUG FIX: path was leaked on this branch */
		ly_log_put_with_flag(G_LOG_LEVEL_DEBUG,  _("Cannot open file: %s. unsupported protocol!"), uri);
		return NULL;
	}
	g_free(prefix);
	if(!g_file_test(path, G_FILE_TEST_EXISTS))
	{
		g_free(path);
		ly_log_put_with_flag(G_LOG_LEVEL_DEBUG,  _("Cannot open file: %s. file not found!"), uri);
		return NULL;
	}
	g_free(path);

	ly_mdh_pipeline=NULL;
	ly_mdh_md_eos=FALSE;

	/*
	 * create and initial metadata
	 */
	LyMdhMetadata *metadata=ly_mdh_new();
	g_strlcpy(metadata->uri, uri, sizeof(metadata->uri));

	/*
	 * build the pipeline: urisrc ! decodebin ! fakesink
	 */
	GstFormat fmt=GST_FORMAT_TIME;
	GstElement *urisrc;
	GstElement *decodebin;
	GstElement *fakesink;
	GstBus *bus=NULL;
	gint changeTimeout = 0;
	GstStateChangeReturn rt;
	GstMessage *msg;

	ly_mdh_pipeline=gst_pipeline_new("pipeline");
	urisrc=gst_element_make_from_uri(GST_URI_SRC,metadata->uri,"urisrc");
	decodebin=gst_element_factory_make("decodebin","decodebin");
	fakesink=gst_element_factory_make("fakesink","fakesink");

	gst_bin_add_many(GST_BIN(ly_mdh_pipeline),urisrc,decodebin,fakesink,NULL);
	gst_element_link(urisrc,decodebin);

	/* decodebin pads appear dynamically; hook them to fakesink on creation */
	g_signal_connect_object(G_OBJECT(decodebin),"new-decoded-pad",G_CALLBACK(ly_mdh_new_with_uri_pipe_cb),fakesink,0);

	bus = gst_pipeline_get_bus(GST_PIPELINE(ly_mdh_pipeline));
	gst_element_set_state(ly_mdh_pipeline,GST_STATE_NULL);
	gst_element_set_state(ly_mdh_pipeline,GST_STATE_READY);
	rt = gst_element_set_state(ly_mdh_pipeline,GST_STATE_PAUSED);
	if(!rt)	/* GST_STATE_CHANGE_FAILURE == 0 */
	{
		gst_object_unref(bus);	/* BUG FIX: bus was leaked on this path */
		if(ly_mdh_pipeline!=NULL)
			gst_object_unref(GST_OBJECT(ly_mdh_pipeline));
		ly_mdh_md_eos=FALSE;
		ly_mdh_pipeline=NULL;
		/* BUG FIX: returned FALSE from a pointer-returning function.
		 * NOTE(review): metadata leaks here -- free it once the
		 * LyMdhMetadata destructor is confirmed. */
		return NULL;
	}
	/* Pump bus messages until the async state change settles, EOS is
	 * seen, or ~5s pass without any message. */
	while(rt==GST_STATE_CHANGE_ASYNC && !ly_mdh_md_eos && changeTimeout < 5)
	{
		msg = gst_bus_timed_pop(bus, 1 * GST_SECOND);
		if(msg!=NULL)
		{
			ly_mdh_new_with_uri_full_bus_cb(bus, msg, metadata);
			gst_message_unref(msg);
			changeTimeout = 0;
		}
		else
			changeTimeout++;
		rt = gst_element_get_state(ly_mdh_pipeline, NULL, NULL, 0);
	}
	gst_object_unref(bus);
	ly_mdh_new_with_uri_full_loop_cb(metadata, ly_mdh_pipeline, FALSE);
	if(rt!=GST_STATE_CHANGE_SUCCESS)
	{
		gst_element_set_state(ly_mdh_pipeline,GST_STATE_NULL);
		if(ly_mdh_pipeline!=NULL)
			gst_object_unref(GST_OBJECT(ly_mdh_pipeline));
		ly_mdh_pipeline=NULL;
		/* BUG FIX: was `return FALSE` (see above). */
		return NULL;
	}

	/*
	 * get duration
	 */
	gint64 dura=0;
	gst_element_query_duration(ly_mdh_pipeline,&fmt,&dura);
	char *duration=ly_mdh_time_int2str(dura);
	g_strlcpy(metadata->duration,duration,sizeof(metadata->duration));
	g_free(duration);

	rt = gst_element_set_state(ly_mdh_pipeline,GST_STATE_NULL);
	gst_object_unref(ly_mdh_pipeline);

	/* Fall back to the file name when no usable title tag was found. */
	char *str;
	if(g_str_equal(metadata->title,"")||g_str_equal(metadata->title,"unknown"))
	{
		str=ly_gla_uri_get_filename(uri);
		g_strlcpy(metadata->title,str,sizeof(metadata->title));
		g_free(str);
	}

	ly_mdh_pipeline=NULL;
	ly_mdh_md_eos=FALSE;
	return metadata;
}
コード例 #7
0
// Attaches a new track to the media source: builds a parser bin matching the
// track's caps (h264parse/h265parse/aacparse/mpegaudioparse), wires
// appsrc -> parser -> src pad, and updates the stream bookkeeping before
// emitting the matching {audio,video,text}-changed signal.
void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps)
{
    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();
    Stream* stream = 0;
    GstStructure* s = 0;
    const gchar* appsrctypename = 0;
    const gchar* mediaType = 0;
    gchar *parserBinName;
    bool capsNotifyHandlerConnected = false;
    unsigned padId = 0;

    GST_OBJECT_LOCK(webKitMediaSrc);
    stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    ASSERT(stream != 0);

    // The top-level caps name decides which parser bin we build.
    s = gst_caps_get_structure(caps, 0);
    appsrctypename = gst_structure_get_name(s);
    mediaType = appsrctypename;

    GST_OBJECT_LOCK(webKitMediaSrc);
    padId = stream->parent->priv->numberOfPads;
    stream->parent->priv->numberOfPads++;
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    parserBinName = g_strdup_printf("streamparser%u", padId);

    ASSERT(caps != 0);

    stream->parser = gst_bin_new(parserBinName);
    g_free(parserBinName);

    GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s, caps=%" GST_PTR_FORMAT, trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType, caps);

    if (!g_strcmp0(mediaType, "video/x-h264")) {
        GstElement* parser;
        GstElement* capsfilter;
        GstPad* pad = nullptr;
        GstCaps* filtercaps;

        // Force access-unit alignment after the parser.
        filtercaps = gst_caps_new_simple("video/x-h264", "alignment", G_TYPE_STRING, "au", NULL);
        parser = gst_element_factory_make("h264parse", 0);
        capsfilter = gst_element_factory_make("capsfilter", 0);
        g_object_set(capsfilter, "caps", filtercaps, NULL);
        gst_caps_unref(filtercaps);

        gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL);
        gst_element_link_pads(parser, "src", capsfilter, "sink");

        if (!pad)
            pad = gst_element_get_static_pad(parser, "sink");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad));
        gst_object_unref(pad);

        pad = gst_element_get_static_pad(capsfilter, "src");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad));
        gst_object_unref(pad);
    } else if (!g_strcmp0(mediaType, "video/x-h265")) {
        GstElement* parser;
        GstElement* capsfilter;
        GstPad* pad = nullptr;
        GstCaps* filtercaps;

        filtercaps = gst_caps_new_simple("video/x-h265", "alignment", G_TYPE_STRING, "au", NULL);
        parser = gst_element_factory_make("h265parse", 0);
        capsfilter = gst_element_factory_make("capsfilter", 0);
        g_object_set(capsfilter, "caps", filtercaps, NULL);
        gst_caps_unref(filtercaps);

        gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL);
        gst_element_link_pads(parser, "src", capsfilter, "sink");

        if (!pad)
            pad = gst_element_get_static_pad(parser, "sink");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad));
        gst_object_unref(pad);

        pad = gst_element_get_static_pad(capsfilter, "src");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad));
        gst_object_unref(pad);
    } else if (!g_strcmp0(mediaType, "audio/mpeg")) {
        gint mpegversion = -1;
        // BUG FIX: parser was uninitialized; if mpegversion was unexpected
        // and ASSERT_NOT_REACHED() compiled to a no-op (release builds),
        // the code below used an indeterminate pointer.
        GstElement* parser = nullptr;
        GstPad* pad = nullptr;

        gst_structure_get_int(s, "mpegversion", &mpegversion);
        if (mpegversion == 1) {
            parser = gst_element_factory_make("mpegaudioparse", 0);
        } else if (mpegversion == 2 || mpegversion == 4) {
            parser = gst_element_factory_make("aacparse", 0);
        } else {
            ASSERT_NOT_REACHED();
        }

        if (!parser) {
            // Bail out like the unsupported-caps branch instead of
            // dereferencing a null parser.
            GST_ERROR_OBJECT(stream->parent, "Unsupported caps: %" GST_PTR_FORMAT, caps);
            gst_object_unref(GST_OBJECT(stream->parser));
            return;
        }

        gst_bin_add(GST_BIN(stream->parser), parser);

        if (!pad)
            pad = gst_element_get_static_pad(parser, "sink");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad));
        gst_object_unref(pad);

        pad = gst_element_get_static_pad(parser, "src");
        gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad));
        gst_object_unref(pad);
    } else {
        GST_ERROR_OBJECT(stream->parent, "Unsupported caps: %" GST_PTR_FORMAT, caps);
        gst_object_unref(GST_OBJECT(stream->parser));
        return;
    }

    GST_OBJECT_LOCK(webKitMediaSrc);
    stream->type = Unknown;
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    ASSERT(stream->parser);
    gst_bin_add(GST_BIN(stream->parent), stream->parser);
    gst_element_sync_state_with_parent(stream->parser);

    // appsrc -> parser bin
    GstPad* sinkpad = gst_element_get_static_pad(stream->parser, "sink");
    GstPad* srcpad = gst_element_get_static_pad(stream->appsrc, "src");
    gst_pad_link(srcpad, sinkpad);
    gst_object_unref(srcpad);
    srcpad = 0;
    gst_object_unref(sinkpad);
    sinkpad = 0;

    srcpad = gst_element_get_static_pad(stream->parser, "src");
    // TODO: Is padId the best way to identify the Stream? What about trackId?
    g_object_set_data(G_OBJECT(srcpad), "id", GINT_TO_POINTER(padId));
    if (!capsNotifyHandlerConnected)
        g_signal_connect(srcpad, "notify::caps", G_CALLBACK(webKitMediaSrcParserNotifyCaps), stream);
    webKitMediaSrcLinkStreamToSrcPad(srcpad, stream);

    ASSERT(stream->parent->priv->mediaPlayerPrivate);
    int signal = -1;
    if (g_str_has_prefix(mediaType, "audio")) {
        GST_OBJECT_LOCK(webKitMediaSrc);
        stream->type = Audio;
        stream->parent->priv->nAudio++;
        GST_OBJECT_UNLOCK(webKitMediaSrc);
        signal = SIGNAL_AUDIO_CHANGED;

        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "video")) {
        GST_OBJECT_LOCK(webKitMediaSrc);
        stream->type = Video;
        stream->parent->priv->nVideo++;
        GST_OBJECT_UNLOCK(webKitMediaSrc);
        signal = SIGNAL_VIDEO_CHANGED;

        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "text")) {
        GST_OBJECT_LOCK(webKitMediaSrc);
        stream->type = Text;
        stream->parent->priv->nText++;
        GST_OBJECT_UNLOCK(webKitMediaSrc);
        signal = SIGNAL_TEXT_CHANGED;

        // TODO: Support text tracks.
    }

    if (signal != -1)
        g_signal_emit(G_OBJECT(stream->parent), webkit_media_src_signals[signal], 0, NULL);

    gst_object_unref(srcpad);
    srcpad = 0;
}
コード例 #8
0
ファイル: banshee-player-vis.c プロジェクト: Rizean/banshee
// Builds the visualization branch of the audio pipeline:
//   .audiotee ! queue ! audioresample ! audioconvert ! fakesink
// The fakesink's handoff signal delivers PCM slices to bp_vis_pcm_handoff;
// the branch starts blocked until managed code enables visualization.
void
_bp_vis_pipeline_setup (BansheePlayer *player)
{
    GstElement *fakesink, *converter, *resampler, *audiosinkqueue;
    GstCaps *caps;
    GstPad *pad, *teepad;
    gint wanted_size;

    player->vis_buffer = NULL;
    player->vis_fft = gst_fft_f32_new (SLICE_SIZE * 2, FALSE);
    player->vis_fft_buffer = g_new (GstFFTF32Complex, SLICE_SIZE + 1);
    player->vis_fft_sample_buffer = g_new0 (gfloat, SLICE_SIZE);

    // Core elements, if something fails here, it's the end of the world
    audiosinkqueue = gst_element_factory_make ("queue", "vis-queue");
    resampler = gst_element_factory_make ("audioresample", "vis-resample");
    converter = gst_element_factory_make ("audioconvert", "vis-convert");
    fakesink = gst_element_factory_make ("fakesink", "vis-sink");

    // BUG FIX: this check must run before audiosinkqueue is used; the
    // original attached the event probe to audiosinkqueue's pad first,
    // dereferencing a possibly-NULL element.
    if (audiosinkqueue == NULL || resampler == NULL || converter == NULL || fakesink == NULL) {
        bp_debug ("Could not construct visualization pipeline, a fundamental element could not be created");
        return;
    }

    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    gst_pad_add_event_probe (pad, G_CALLBACK (_bp_vis_pipeline_event_probe), player);
    gst_object_unref (GST_OBJECT (pad));

    // channels * slice size * float size = size of chunks we want
    // NOTE(review): wanted_size is computed but unused in this function --
    // confirm whether it should configure the queue/fakesink.
    wanted_size = 2 * SLICE_SIZE * sizeof(gfloat);
    (void) wanted_size;

    // Keep around the 5 most recent seconds of audio so that when resuming
    // visualization we have something to show right away.
    g_object_set (G_OBJECT (audiosinkqueue),
            "leaky", 2,
            "max-size-buffers", 0,
            "max-size-bytes", 0,
            "max-size-time", GST_SECOND * 5,
            NULL);

    g_signal_connect (G_OBJECT (fakesink), "handoff", G_CALLBACK (bp_vis_pcm_handoff), player);

    g_object_set (G_OBJECT (fakesink),
            // This enables the handoff signal.
            "signal-handoffs", TRUE,
            // Synchronize so we see vis at the same time as we hear it.
            "sync", TRUE,
            // Drop buffers if they come in too late.  This is mainly used when
            // thawing the vis pipeline.
            "max-lateness", GST_SECOND / 120,
            // Deliver buffers one frame early.  This allows for rendering
            // time.  (TODO: It would be great to calculate this on-the-fly so
            // we match the rendering time.
            "ts-offset", -GST_SECOND / 60,
            // Don't go to PAUSED when we freeze the pipeline.
            "async", FALSE, NULL);

    gst_bin_add_many (GST_BIN (player->audiobin), audiosinkqueue, resampler,
                      converter, fakesink, NULL);

    // BUG FIX: keep the tee request pad in a local so its reference can be
    // released after linking (it was leaked before).
    teepad = gst_element_get_request_pad (player->audiotee, "src%d");
    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    gst_pad_link (teepad, pad);
    gst_object_unref (GST_OBJECT (pad));
    gst_object_unref (GST_OBJECT (teepad));

    gst_element_link_many (audiosinkqueue, resampler, converter, NULL);

    caps = gst_static_caps_get (&vis_data_sink_caps);
    gst_element_link_filtered (converter, fakesink, caps);
    gst_caps_unref (caps);

    player->vis_buffer = gst_adapter_new ();
    player->vis_resampler = resampler;
    player->vis_thawing = FALSE;
    player->vis_enabled = FALSE;

    // Disable the pipeline till we hear otherwise from managed land.
    _bp_vis_pipeline_set_blocked (player, TRUE);
}
コード例 #9
0
ファイル: main.cpp プロジェクト: zsx/ossbuild
gint main (gint argc, gchar *argv[])
{
    /* Decode the video file given on the command line, draw a text overlay
     * on it, then tee the stream out to three GL sinks: a plain glimagesink
     * (with custom draw/reshape callbacks), a cube-filtered glimagesink and
     * a raw glimagesink. Runs a GLib main loop until the pipeline stops. */

    if (argc != 2)
    {
        g_warning ("usage: doublecube.exe videolocation\n");
        return -1;
    }

    std::string video_location(argv[1]);

    /* initialization */
    gst_init (&argc, &argv);
    GMainLoop* loop = g_main_loop_new (NULL, FALSE);

    /* create the pipeline */
    GstElement* pipeline = gst_pipeline_new ("pipeline");

    /* watch for messages on the pipeline's bus (note that this will only
     * work like this when a GLib main loop is running) */
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* create elements */
    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin0");
    GstElement* identity  = gst_element_factory_make ("identity", "identity0");
    GstElement* textoverlay = gst_element_factory_make ("textoverlay", "textoverlay0"); /* textoverlay requires I420 */
    GstElement* tee = gst_element_factory_make ("tee", "tee0");

    GstElement* queue0 = gst_element_factory_make ("queue", "queue0");
    GstElement* glupload0  = gst_element_factory_make ("glupload", "glupload0");
    GstElement* glimagesink0  = gst_element_factory_make ("glimagesink", "glimagesink0");

    GstElement* queue1 = gst_element_factory_make ("queue", "queue1");
    GstElement* glupload1  = gst_element_factory_make ("glupload", "glupload1");
    GstElement* glfiltercube  = gst_element_factory_make ("glfiltercube", "glfiltercube");
    GstElement* glimagesink1  = gst_element_factory_make ("glimagesink", "glimagesink1");

    GstElement* queue2 = gst_element_factory_make ("queue", "queue2");
    GstElement* glimagesink2  = gst_element_factory_make ("glimagesink", "glimagesink2");


    if (!videosrc || !decodebin || !identity || !textoverlay || !tee ||
            !queue0 || !glupload0 || !glimagesink0 ||
            !queue1 || !glupload1 || !glfiltercube || !glimagesink1 ||
            !queue2 || !glimagesink2)
    {
        g_warning ("one element could not be found \n");
        return -1;
    }

    GstCaps* cubecaps = gst_caps_new_simple("video/x-raw-gl",
                                            "width", G_TYPE_INT, 600,
                                            "height", G_TYPE_INT, 400,
                                            NULL) ;

    /* configure elements */
    g_object_set(G_OBJECT(videosrc), "num-buffers", 1000, NULL);
    g_object_set(G_OBJECT(videosrc), "location", video_location.c_str(), NULL);
    g_signal_connect(identity, "handoff", G_CALLBACK(identityCallback), textoverlay) ;
    /* canonical GObject property name uses '-'; "font_desc" relied on
     * GObject's name canonicalization */
    g_object_set(G_OBJECT(textoverlay), "font-desc", "Ahafoni CLM Bold 30", NULL);
    g_object_set(G_OBJECT(glimagesink0), "client-reshape-callback", reshapeCallback, NULL);
    g_object_set(G_OBJECT(glimagesink0), "client-draw-callback", drawCallback, NULL);

    /* add elements */
    gst_bin_add_many (GST_BIN (pipeline), videosrc, decodebin, identity, textoverlay, tee,
                      queue0, glupload0, glimagesink0,
                      queue1, glupload1, glfiltercube, glimagesink1,
                      queue2, glimagesink2, NULL);


    gst_element_link_pads (videosrc, "src", decodebin, "sink");

    /* decodebin exposes its pads dynamically; hook them up in the callback */
    g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (cb_new_pad), identity);

    if (!gst_element_link_pads(identity, "src", textoverlay, "video_sink"))
    {
        g_print ("Failed to link identity to textoverlay!\n");
        return -1;
    }

    if (!gst_element_link_many(textoverlay, tee, queue0, glupload0, NULL))
    {
        g_warning ("Failed to link one or more elements between textoverlay and glupload0!\n");
        return -1;
    }

    gboolean link_ok = gst_element_link_filtered(glupload0, glimagesink0, cubecaps) ;
    gst_caps_unref(cubecaps) ;
    if(!link_ok)
    {
        g_warning("Failed to link glupload0 to glimagesink0!\n") ;
        return -1 ;
    }

    if (!gst_element_link_many(tee, queue1, glupload1, glfiltercube, glimagesink1, NULL))
    {
        g_warning ("Failed to link one or more elements between tee and glimagesink1!\n");
        return -1;
    }

    if (!gst_element_link_many(tee, queue2, glimagesink2, NULL))
    {
        g_warning ("Failed to link one or more elements between tee and glimagesink2!\n");
        return -1;
    }

    /* run */
    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print ("Failed to start up pipeline!\n");

        /* check if there is an error message with details on the bus.
         * Re-acquire the bus here: the earlier ref was already released
         * (the original polled the stale pointer). */
        GstBus* err_bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
        GstMessage* msg = gst_bus_poll (err_bus, GST_MESSAGE_ERROR, 0);
        if (msg)
        {
            GError *err = NULL;

            gst_message_parse_error (msg, &err, NULL);
            g_print ("ERROR: %s\n", err->message);
            g_error_free (err);
            gst_message_unref (msg);
        }
        gst_object_unref (err_bus);
        return -1;
    }

    g_main_loop_run (loop);

    /* clean up */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    g_main_loop_unref (loop);

    return 0;

}
コード例 #10
0
static gboolean
gst_ss_demux_create_dummy_pipe (GstSSDemux * demux, GstSSDemuxStream *stream)
{
  gchar *name = NULL;
  GstCaps *caps = NULL;
  GstElement *capsfilter = NULL;
  GstElement *enc = NULL;

  /* Build a stand-alone test pipeline for this stream:
   *   imagereader -> capsfilter (I420 640x480 @ 25fps) -> H.264 encoder -> appsink
   * The appsink's "new-buffer" signal feeds the encoded buffers back via
   * gst_ssm_demux_on_new_buffer(). Returns FALSE if any element cannot be
   * created or linked. */

  name = g_strdup_printf("%s-%s", stream->name, "dummy");
  stream->pipe = gst_pipeline_new (name);
  /* the pipeline copies the name, so release it before the error check
   * (the original leaked it on the failure path) */
  g_free(name);
  if (!stream->pipe) {
    GST_ERROR_OBJECT (demux, "failed to create pipeline");
    return FALSE;
  }

  /* create dummy sender source */
  name = g_strdup_printf("%s-%s", stream->name, "dummysrc");
  stream->urisrc = gst_element_factory_make ("imagereader", name);
  g_free(name);
  if (!stream->urisrc) {
    GST_ERROR_OBJECT (demux,"failed to create imagereader element");
    return FALSE;
  }
  g_object_set (G_OBJECT (stream->urisrc), "location", "/opt/home/root/aonly_VGA_1frame_I420.yuv", NULL);
  g_object_set (G_OBJECT (stream->urisrc), "framerate", 25, NULL);
  g_object_set (G_OBJECT (stream->urisrc), "num-buffers", 60, NULL);

  /* caps filter pinning the raw video format fed to the encoder */
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  if (!capsfilter) {
    GST_ERROR_OBJECT (demux, "failed to create capsfilter element");
    return FALSE;
  }
  caps = gst_caps_new_simple ("video/x-raw-yuv",
                  "width", G_TYPE_INT, 640,
                  "height", G_TYPE_INT, 480,
                  "framerate",GST_TYPE_FRACTION, 25,1,
                  "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
                  NULL);
  g_object_set (G_OBJECT (capsfilter), "caps", caps,  NULL);
  /* capsfilter takes its own ref on the caps; drop ours (was leaked) */
  gst_caps_unref (caps);

  /* create the H.264 encoder element */
  enc = gst_element_factory_make ("savsenc_h264", "H264 encoder");
  if (!enc) {
    GST_ERROR_OBJECT (demux, "failed to create h264 encoder element");
    return FALSE;
  }
  name = g_strdup_printf("%s-%s", stream->name, "sink");
  stream->sink = gst_element_factory_make ("appsink", name);
  g_free(name);
  if (!stream->sink) {
    GST_ERROR_OBJECT (demux, "failed to create appsink element");
    return FALSE;
  }
  g_object_set (G_OBJECT (stream->sink), "emit-signals", TRUE, "sync", FALSE, NULL);
  g_signal_connect (stream->sink, "new-buffer",  G_CALLBACK (gst_ssm_demux_on_new_buffer), stream);

  /* add to pipeline & link all elements */
  gst_bin_add_many (GST_BIN (stream->pipe), stream->urisrc, capsfilter, enc, stream->sink, NULL);

  if (!gst_element_link_many (stream->urisrc, capsfilter, enc, stream->sink, NULL)) {
    GST_ERROR_OBJECT (demux,"failed to link dummy pipe elements...");
    return FALSE;
  }

  stream->bus = gst_pipeline_get_bus (GST_PIPELINE (stream->pipe));
  gst_bus_add_watch (stream->bus, (GstBusFunc)gst_ss_demux_download_bus_cb, stream);
  gst_object_unref (stream->bus);

  return TRUE;
}
コード例 #11
0
static gboolean
gst_ss_demux_create_dummy_pipe (GstSSDemux * demux, GstSSDemuxStream *stream)
{
  gchar *name = NULL;
  GstBus *bus = NULL;
  GstCaps *caps = NULL;

  /* Build a stand-alone test pipeline for this stream:
   *   filesrc (raw .264 file) -> legacyh264parse -> appsink
   * The appsink's "new-buffer" signal feeds the parsed buffers back via
   * gst_ssm_demux_on_new_buffer(). Returns FALSE if any element cannot be
   * created or linked. */

  name = g_strdup_printf("%s-%s", stream->name, "dummy");
  stream->pipe = gst_pipeline_new (name);
  /* the pipeline copies the name; free it before the error check so it
   * cannot leak (the original leaked it on the failure path) */
  g_free(name);
  if (!stream->pipe) {
    GST_ERROR_OBJECT (demux, "failed to create pipeline");
    return FALSE;
  }

  /* create dummy sender source */
  name = g_strdup_printf("%s-%s", stream->name, "dummysrc");
  stream->urisrc = gst_element_factory_make ("filesrc", name);
  g_free(name);
  if (!stream->urisrc) {
    GST_ERROR_OBJECT (demux,"failed to create filesrc element");
    return FALSE;
  }
  g_object_set (G_OBJECT (stream->urisrc), "location", "/opt/home/root/sound_2sec.264", NULL);

  /* create h264 parse element */
  name = g_strdup_printf("%s-%s", stream->name, "parser");
  stream->parser= gst_element_factory_make ("legacyh264parse", name);
  /* the original never freed this name (it was overwritten below) */
  g_free(name);
  if (!stream->parser) {
    GST_ERROR_OBJECT (demux, "failed to create h264 parse element");
    return FALSE;
  }
  /* output-format=1 selects the parser's alternate output mode */
  g_object_set (G_OBJECT (stream->parser), "output-format", 1, NULL);

  /* create appsink element */
  name = g_strdup_printf("%s-%s", stream->name, "sink");
  stream->sink = gst_element_factory_make ("appsink", name);
  g_free(name);
  if (!stream->sink) {
    GST_ERROR_OBJECT (demux, "failed to create appsink element");
    return FALSE;
  }
  g_object_set (G_OBJECT (stream->sink), "emit-signals", TRUE, "sync", FALSE, NULL);

  caps = gst_caps_new_simple ("video/x-h264",
                  "width", G_TYPE_INT, 640,
                  "height", G_TYPE_INT, 480,
                  "stream-format", G_TYPE_STRING, "byte-stream",
                  NULL);
  g_object_set (G_OBJECT (stream->sink), "caps", caps, NULL);
  /* the sink takes its own ref on the caps; drop ours (was leaked) */
  gst_caps_unref (caps);

  g_signal_connect (stream->sink, "new-buffer",  G_CALLBACK (gst_ssm_demux_on_new_buffer), stream);

  /* add to pipeline & link all elements */
  gst_bin_add_many (GST_BIN (stream->pipe), stream->urisrc, stream->parser, stream->sink, NULL);
  if (!gst_element_link_many (stream->urisrc, stream->parser, stream->sink, NULL)) {
    GST_ERROR_OBJECT (demux,"failed to link elements...");
    return FALSE;
  }

  bus = gst_pipeline_get_bus (GST_PIPELINE (stream->pipe));
  gst_bus_add_watch (bus, (GstBusFunc)gst_ss_demux_download_bus_cb, stream);
  gst_object_unref (bus);

  return TRUE;
}
コード例 #12
0
static gboolean
gst_ss_demux_create_download_pipe (GstSSDemux * demux, GstSSDemuxStream *stream, const gchar * uri, guint64 start_ts)
{
  gchar *name = NULL;
  GstCaps *caps = NULL;
  gchar *caps_str = NULL;

  /* Build the per-stream download pipeline:
   *   urisrc (http) -> piffdemux -> appsink
   * The piffdemux is primed with the caps, start timestamp, duration and
   * live/lookahead parameters from the manifest parser; the appsink's
   * "new-buffer" signal hands fragments back to the demuxer.
   * Returns FALSE on invalid URI or any element-creation/link failure. */

  if (!gst_uri_is_valid (uri))
    return FALSE;

  name = g_strdup_printf("%s-%s", stream->name, "downloader");
  stream->pipe = gst_pipeline_new (name);
  /* the pipeline copies the name; free before the check (was leaked on failure) */
  g_free(name);
  if (!stream->pipe) {
    GST_ERROR_OBJECT (demux, "failed to create pipeline");
    return FALSE;
  }

  name = g_strdup_printf("%s-%s", stream->name, "httpsrc");
  GST_DEBUG ("Creating source element for the URI:%s", uri);
  stream->urisrc = gst_element_make_from_uri (GST_URI_SRC, uri, name);
  g_free(name);
  if (!stream->urisrc) {
    GST_ERROR_OBJECT (demux, "failed to create urisrc");
    return FALSE;
  }

  g_object_set (G_OBJECT (stream->urisrc), "is-live",
      GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser) ? TRUE : FALSE, NULL);

  name = g_strdup_printf("%s-%s", stream->name, "parser");
  stream->parser = gst_element_factory_make ("piffdemux", name);
  g_free(name);
  if (!stream->parser) {
    GST_ERROR_OBJECT (demux, "failed to create piffdemux element");
    return FALSE;
  }

  caps = ssm_parse_get_stream_caps (demux->parser, stream->type);
  /* gst_caps_to_string() returns an allocated string; the original leaked it */
  caps_str = gst_caps_to_string (caps);
  GST_INFO_OBJECT (stream->pad, "prepare caps = %s", caps_str);
  g_free (caps_str);

  /* NOTE(review): caps ownership after this set follows the original code;
   * whether ssm_parse_get_stream_caps() transfers a ref is not visible here */
  g_object_set (G_OBJECT (stream->parser), "caps", caps, NULL);
  g_object_set (G_OBJECT (stream->parser), "start-ts", start_ts, NULL);
  g_object_set (G_OBJECT (stream->parser), "duration", GST_SSM_PARSE_GET_DURATION(demux->parser), NULL);
  g_object_set (G_OBJECT (stream->parser), "is-live", GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser), NULL);
  g_object_set (G_OBJECT (stream->parser), "lookahead-count", GST_SSM_PARSE_LOOKAHEAD_COUNT(demux->parser), NULL);
  g_signal_connect (stream->parser, "live-param",  G_CALLBACK (gst_ss_demux_append_live_params), stream);

  name = g_strdup_printf("%s-%s", stream->name, "sink");
  stream->sink = gst_element_factory_make ("appsink", name);
  g_free(name);
  if (!stream->sink) {
    GST_ERROR_OBJECT (demux, "failed to create appsink element");
    return FALSE;
  }
  g_object_set (G_OBJECT (stream->sink), "emit-signals", TRUE, "sync", FALSE, NULL);
  g_signal_connect (stream->sink, "new-buffer",  G_CALLBACK (gst_ssm_demux_on_new_buffer), stream);

  gst_bin_add_many (GST_BIN (stream->pipe), stream->urisrc, stream->parser, stream->sink, NULL);
  if (!gst_element_link_many (stream->urisrc, stream->parser, stream->sink, NULL)) {
    GST_ERROR ("failed to link elements...");
    return FALSE;
  }

  stream->bus = gst_pipeline_get_bus (GST_PIPELINE (stream->pipe));
  gst_bus_add_watch (stream->bus, (GstBusFunc)gst_ss_demux_download_bus_cb, stream);
  gst_object_unref (stream->bus);

  return TRUE;
}
コード例 #13
0
ファイル: gstdecode.c プロジェクト: fabn/vlc
/*****************************************************************************
 * OpenDecoder: probe the decoder and return score
 *****************************************************************************/
static int OpenDecoder( vlc_object_t *p_this )
{
    decoder_t *p_dec = ( decoder_t* )p_this;
    decoder_sys_t *p_sys;
    GstStateChangeReturn i_ret;
    gboolean b_ret;
    sink_src_caps_t caps = { NULL, NULL };
    GstStructure *p_str;
    GstAppSrcCallbacks cb;
    int i_rval = VLC_SUCCESS;
    GList *p_list;
    bool dbin;

/* On failure, logs, records the error code and jumps to the shared
 * cleanup label at the bottom of this function. */
#define VLC_GST_CHECK( r, v, s, t ) \
    { if( r == v ){ msg_Err( p_dec, s ); i_rval = t; goto fail; } }

    if( !vlc_gst_init( ))
    {
        msg_Err( p_dec, "failed to register vlcvideosink" );
        return VLC_EGENERIC;
    }

    /* translate VLC's input format into a GstStructure for the sink caps */
    p_str = vlc_to_gst_fmt( &p_dec->fmt_in );
    if( !p_str )
        return VLC_EGENERIC;

    /* Allocate the memory needed to store the decoder's structure */
    p_sys = p_dec->p_sys = calloc( 1, sizeof( *p_sys ) );
    if( p_sys == NULL )
    {
        gst_structure_free( p_str );
        return VLC_ENOMEM;
    }

    dbin = var_CreateGetBool( p_dec, "use-decodebin" );
    msg_Dbg( p_dec, "Using decodebin? %s", dbin ? "yes ":"no" );

    caps.p_sinkcaps = gst_caps_new_empty( );
    gst_caps_append_structure( caps.p_sinkcaps, p_str );
    /* Currently supports only system memory raw output format */
    caps.p_srccaps = gst_caps_new_empty_simple( "video/x-raw" );

    /* Get the list of all the available gstreamer decoders */
    p_list = gst_element_factory_list_get_elements(
            GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL );
    VLC_GST_CHECK( p_list, NULL, "no decoder list found", VLC_ENOMOD );
    if( !dbin )
    {
        GList *p_l;
        /* Sort them as per ranks */
        p_list = g_list_sort( p_list, gst_plugin_feature_rank_compare_func );
        VLC_GST_CHECK( p_list, NULL, "failed to sort decoders list",
                VLC_ENOMOD );
        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
                VLC_ENOMOD );
        /* create the decoder with highest rank */
        p_sys->p_decode_in = gst_element_factory_create(
                ( GstElementFactory* )p_l->data, NULL );
        VLC_GST_CHECK( p_sys->p_decode_in, NULL,
                "failed to create decoder", VLC_ENOMOD );
    }
    else
    {
        GList *p_l;
        /* Just check if any suitable decoder exists, rest will be
         * handled by decodebin */
        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
                VLC_ENOMOD );
    }
    gst_plugin_feature_list_free( p_list );
    p_list = NULL;
    gst_caps_unref( caps.p_srccaps );
    caps.p_srccaps = NULL;

    p_sys->b_prerolled = false;
    p_sys->b_running = false;

    /* Queue: GStreamer thread will dump buffers into this queue,
     * DecodeBlock() will pop out the buffers from the queue */
    p_sys->p_que = gst_atomic_queue_new( 0 );
    VLC_GST_CHECK( p_sys->p_que, NULL, "failed to create queue",
            VLC_ENOMEM );

    p_sys->p_decode_src = gst_element_factory_make( "appsrc", NULL );
    VLC_GST_CHECK( p_sys->p_decode_src, NULL, "appsrc not found",
            VLC_ENOMOD );
    g_object_set( G_OBJECT( p_sys->p_decode_src ), "caps", caps.p_sinkcaps,
            "emit-signals", TRUE, "format", GST_FORMAT_BYTES,
            "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,
            /* Making DecodeBlock() to block on appsrc with max queue size of 1 byte.
             * This will make the push_buffer() tightly coupled with the buffer
             * flow from appsrc -> decoder. push_buffer() will only return when
             * the same buffer it just fed to appsrc has also been fed to the
             * decoder element as well */
            "block", TRUE, "max-bytes", ( guint64 )1, NULL );
    gst_caps_unref( caps.p_sinkcaps );
    caps.p_sinkcaps = NULL;
    cb.enough_data = cb.need_data = NULL;
    cb.seek_data = seek_data_cb;
    gst_app_src_set_callbacks( GST_APP_SRC( p_sys->p_decode_src ),
            &cb, p_dec, NULL );

    if( dbin )
    {
        p_sys->p_decode_in = gst_element_factory_make( "decodebin", NULL );
        VLC_GST_CHECK( p_sys->p_decode_in, NULL, "decodebin not found",
                VLC_ENOMOD );
        //g_object_set( G_OBJECT( p_sys->p_decode_in ),
        //"max-size-buffers", 2, NULL );
        //g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "no-more-pads",
                //G_CALLBACK( no_more_pads_cb ), p_dec );
        g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "pad-added",
                G_CALLBACK( pad_added_cb ), p_dec );

    }

    /* videosink: will emit signal for every available buffer */
    p_sys->p_decode_out = gst_element_factory_make( "vlcvideosink", NULL );
    VLC_GST_CHECK( p_sys->p_decode_out, NULL, "vlcvideosink not found",
            VLC_ENOMOD );
    p_sys->p_allocator = gst_vlc_picture_plane_allocator_new(
            (gpointer) p_dec );
    g_object_set( G_OBJECT( p_sys->p_decode_out ), "sync", FALSE, "allocator",
            p_sys->p_allocator, "id", (gpointer) p_dec, NULL );
    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-buffer",
            G_CALLBACK( frame_handoff_cb ), p_dec );

    //FIXME: caps_signal
#if 0
    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-caps",
            G_CALLBACK( caps_handoff_cb ), p_dec );
#else
    GST_VLC_VIDEO_SINK( p_sys->p_decode_out )->new_caps = caps_handoff_cb;
#endif

    p_sys->p_decoder = GST_ELEMENT( gst_bin_new( "decoder" ) );
    VLC_GST_CHECK( p_sys->p_decoder, NULL, "bin not found", VLC_ENOMOD );
    p_sys->p_bus = gst_bus_new( );
    VLC_GST_CHECK( p_sys->p_bus, NULL, "failed to create bus",
            VLC_ENOMOD );
    gst_element_set_bus( p_sys->p_decoder, p_sys->p_bus );

    gst_bin_add_many( GST_BIN( p_sys->p_decoder ),
            p_sys->p_decode_src, p_sys->p_decode_in,
            p_sys->p_decode_out, NULL );
    /* the bin took ownership of the elements; take extra refs so the
     * p_sys pointers stay valid on their own — presumably for CloseDecoder
     * to unref individually (TODO confirm against CloseDecoder) */
    gst_object_ref( p_sys->p_decode_src );
    gst_object_ref( p_sys->p_decode_in );
    gst_object_ref( p_sys->p_decode_out );

    b_ret = gst_element_link( p_sys->p_decode_src, p_sys->p_decode_in );
    VLC_GST_CHECK( b_ret, FALSE, "failed to link src <-> in",
            VLC_EGENERIC );

    if( !dbin )
    {
        /* with decodebin the in -> out link happens later in pad_added_cb */
        b_ret = gst_element_link( p_sys->p_decode_in, p_sys->p_decode_out );
        VLC_GST_CHECK( b_ret, FALSE, "failed to link in <-> out",
                VLC_EGENERIC );
    }

    p_dec->fmt_out.i_cat = p_dec->fmt_in.i_cat;

    /* set the pipeline to playing */
    i_ret = gst_element_set_state( p_sys->p_decoder, GST_STATE_PLAYING );
    VLC_GST_CHECK( i_ret, GST_STATE_CHANGE_FAILURE,
            "set state failure", VLC_EGENERIC );
    p_sys->b_running = true;

    /* Set callbacks */
    p_dec->pf_decode_video = DecodeBlock;
    p_dec->pf_flush        = Flush;

    return VLC_SUCCESS;

fail:
    /* shared cleanup for every VLC_GST_CHECK failure above */
    if( caps.p_sinkcaps )
        gst_caps_unref( caps.p_sinkcaps );
    if( caps.p_srccaps )
        gst_caps_unref( caps.p_srccaps );
    if( p_list )
        gst_plugin_feature_list_free( p_list );
    CloseDecoder( ( vlc_object_t* )p_dec );
    return i_rval;
}
コード例 #14
0
ファイル: mdh.c プロジェクト: lovesnow/linnya
/* Write the metadata in `md` back into the local file it describes by
 * running a retag pipeline (filesrc -> [codec-specific demux/tag/mux
 * chain] -> filesink into a temp file). Supports MP3, Ogg/Vorbis, FLAC
 * and APE; any other codec fails. Holds ly_mdh_put_mutex for the
 * lifetime of the pipeline (released by the bus handler or on error).
 * Returns TRUE when the pipeline was started successfully. */
gboolean	ly_mdh_push(LyMdhMetadata *md)
{
	/* only local files can be retagged */
	if(!md||!g_str_has_prefix(md->uri, "file://"))
		return FALSE;
	
	if(!g_mutex_trylock(ly_mdh_put_mutex))
	{
		ly_log_put_with_flag(G_LOG_LEVEL_WARNING, _("An old task is running, Tag Failed!"));
		return FALSE;
	}
	/*
	 * BUILD
	 */
	GstElement *filesrc=NULL;
	GstElement *demux=NULL;
	GstElement *mux=NULL;
	GstElement *parse=NULL;
	GstElement *filesink=NULL;
	GstElement *tagger=NULL;
	GstBus *bus=NULL;
	
	const gchar *codec=NULL;
	ly_mdh_put_pipeline=gst_pipeline_new("pipeline");
	filesrc=gst_element_factory_make("filesrc","filesrc");
	filesink=gst_element_factory_make("filesink","filesink");
	if(!ly_mdh_put_pipeline||!filesrc||!filesink)
	{
		/* BUG FIX: the original read "if(ly_mdh_put_pipeline);" — the stray
		 * semicolon made the unref run unconditionally, even when the
		 * pipeline was NULL */
		if(ly_mdh_put_pipeline)
			gst_object_unref(ly_mdh_put_pipeline);
		g_mutex_unlock(ly_mdh_put_mutex);
		return FALSE;
	}
	
	//MP3
	if(strstr(md->codec,"MP3")!=NULL)
	{
		demux=gst_element_factory_make("id3demux","demux");
		tagger=gst_element_factory_make("id3v2mux","tagger");
		codec = "LAME";
		if(!demux||!tagger)
		{
			gst_object_unref(ly_mdh_put_pipeline);
			g_mutex_unlock(ly_mdh_put_mutex);
			return FALSE;
		}
	}
	//OGG
	else if(strstr(md->codec,"Vorbis")!=NULL)
	{
		tagger = gst_element_factory_make("vorbistag", "tagger");
		demux=gst_element_factory_make("oggdemux","demux");
		mux=gst_element_factory_make("oggmux","mux");
		parse = gst_element_factory_make("vorbisparse", "parse");
		codec = "Vorbis";
		if(!demux||!mux||!tagger||!parse)
		{
			gst_object_unref(ly_mdh_put_pipeline);
			g_mutex_unlock(ly_mdh_put_mutex);
			return FALSE;
		}
	}
	//FLAC
	else if(strstr(md->codec,"FLAC")!=NULL)
	{
		tagger = gst_element_factory_make("flactag", "tagger");
		codec="FLAC";
		if(!tagger)
		{
			gst_object_unref(ly_mdh_put_pipeline);
			g_mutex_unlock(ly_mdh_put_mutex);
			return FALSE;
		}
	}
	//APE
	else if(strstr(md->codec,"Monkey's Audio")!=NULL)
	{
		demux=gst_element_factory_make("apedemux","demux");
		tagger=gst_element_factory_make("apev2mux","tagger");
		codec="LAME";
		if(!demux||!tagger)
		{
			gst_object_unref(ly_mdh_put_pipeline);
			g_mutex_unlock(ly_mdh_put_mutex);
			return FALSE;
		}
	}
	else
	{
		gst_object_unref(ly_mdh_put_pipeline);
		g_mutex_unlock(ly_mdh_put_mutex);
		return FALSE;
	}

	/*
	 * SET
	 */
	gchar location_i[1024]="";
	gchar location_o[1024]="";
	/* skip the "file://" prefix (7 bytes) to get the plain path */
	g_snprintf(location_i, sizeof(location_i), "%s", md->uri+7);
	g_snprintf(location_o, sizeof(location_o), "%s%s-%s.audio", LY_GLA_TEMPDIR, md->artist, md->title);
	g_object_set(G_OBJECT(filesrc), "location", location_i, NULL);
	g_object_set(G_OBJECT(filesink), "location", location_o, NULL);

	gst_tag_setter_add_tags(GST_TAG_SETTER(tagger),
							GST_TAG_MERGE_REPLACE_ALL,
							GST_TAG_TITLE, md->title,
							GST_TAG_ARTIST, md->artist,
							GST_TAG_ALBUM, md->album,
							GST_TAG_GENRE, md->genre,
							GST_TAG_TRACK_NUMBER, md->track,
							GST_TAG_ENCODER, "Linnya",
							GST_TAG_ENCODER_VERSION, 1,
							GST_TAG_CODEC,codec,
							NULL);
	
	/*
	 *LINK
	 */
	//MP3
	if(strstr(md->codec,"MP3")!=NULL)
	{
		gst_bin_add_many(GST_BIN(ly_mdh_put_pipeline), filesrc, demux,tagger,filesink, NULL);
		/* id3demux pads appear dynamically; link demux -> tagger in the callback */
		g_signal_connect(demux, "pad-added",G_CALLBACK(ly_mdh_push_add_id3_pad_cb), tagger);
		gst_element_link(filesrc, demux);
		gst_element_link(tagger, filesink);
	}
	//OGG
	else if(strstr(md->codec,"Vorbis")!=NULL)
	{
		gst_bin_add_many(GST_BIN(ly_mdh_put_pipeline), filesrc, demux, tagger, parse, mux, filesink, NULL);
		g_signal_connect(demux, "pad-added",G_CALLBACK(ly_mdh_push_add_ogg_pad_cb), tagger);
		gst_element_link(filesrc, demux);
		gst_element_link_many(tagger, parse, mux, filesink,NULL);
	}
	//FLAC
	else if(strstr(md->codec,"FLAC")!=NULL)
	{
		gst_bin_add_many(GST_BIN(ly_mdh_put_pipeline), filesrc, tagger, filesink, NULL);
		gst_element_link_many(filesrc, tagger, filesink, NULL);
	}
	//APE
	else if(strstr(md->codec,"Monkey's Audio")!=NULL)
	{
		gst_bin_add_many(GST_BIN(ly_mdh_put_pipeline), filesrc, demux,tagger,filesink, NULL);
		g_signal_connect(demux, "pad-added",G_CALLBACK(ly_mdh_push_add_id3_pad_cb), tagger);
		gst_element_link(filesrc, demux);
		gst_element_link(tagger, filesink);
	}
	else
	{
		gst_object_unref(ly_mdh_put_pipeline);
		g_mutex_unlock(ly_mdh_put_mutex);
		return FALSE;
	}
	
	bus = gst_pipeline_get_bus(GST_PIPELINE(ly_mdh_put_pipeline));
	/* the handler owns the duplicated metadata and releases the mutex */
	gst_bus_add_watch(bus, (GstBusFunc)ly_mdh_push_handler_cb,  g_memdup(md,sizeof(LyMdhMetadata)));
	gst_object_unref(bus);
	gst_element_set_state(ly_mdh_put_pipeline, GST_STATE_NULL);
	gst_element_set_state(ly_mdh_put_pipeline, GST_STATE_READY);
	if(gst_element_set_state(ly_mdh_put_pipeline, GST_STATE_PLAYING)==GST_STATE_CHANGE_FAILURE)
	{
		gst_element_set_state(ly_mdh_put_pipeline, GST_STATE_NULL);
		gst_object_unref(ly_mdh_put_pipeline);
		g_mutex_unlock(ly_mdh_put_mutex);
		return FALSE;
	}
	return TRUE;
}
コード例 #15
0
ファイル: util.c プロジェクト: rodrimc/libmicromb
gboolean
set_audio_bin(GstElement *bin, MbMedia *media, GstPad *decoder_src_pad)
{
  GstPad *sink_pad = NULL, *ghost_pad = NULL, *output_sink_pad = NULL;
  GstPad *filter_src_pad = NULL;
  GstCaps *caps = NULL;
  GstPadLinkReturn ret;
  int return_code = TRUE;

  /* Build the media's audio chain (volume -> audioconvert -> audioresample
   * -> capsfilter) inside `bin`, link the decoder's src pad into it, expose
   * the chain through an "a_src" ghost pad and connect that to a freshly
   * requested audiomixer sink pad. Returns FALSE on any link failure. */

  media->audio_volume = gst_element_factory_make ("volume", NULL);
  g_assert (media->audio_volume);

  media->audio_converter = gst_element_factory_make ("audioconvert", NULL);
  g_assert (media->audio_converter);

  media->audio_resampler = gst_element_factory_make ("audioresample", NULL);
  g_assert (media->audio_resampler);

  media->audio_filter = gst_element_factory_make ("capsfilter", NULL);
  g_assert (media->audio_filter);

  gst_element_set_state (media->audio_volume, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_converter, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_resampler, GST_STATE_PAUSED);
  gst_element_set_state (media->audio_filter, GST_STATE_PAUSED);

  caps = gst_caps_from_string (audio_caps);
  g_assert (caps);

  g_object_set (media->audio_filter, "caps", caps, NULL);
  gst_caps_unref(caps);

  gst_bin_add_many (GST_BIN(bin), media->audio_volume, media->audio_converter,
      media->audio_resampler, media->audio_filter, NULL);

  if (!gst_element_link_many (media->audio_volume, media->audio_converter,
        media->audio_resampler, media->audio_filter, NULL))
  {
    g_debug ("Could not link audio_converter and audio_volume together\n.");
    return_code = FALSE;
  }
  else
  {
    sink_pad = gst_element_get_static_pad (media->audio_volume, "sink");
    g_assert (sink_pad);

    ret = gst_pad_link (decoder_src_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret))
    {
      return_code = FALSE;
      g_debug (" Link failed.\n");
    }
    else
    {
      g_debug (" Link succeeded.\n");

      g_object_set (G_OBJECT(media->audio_volume), "volume", media->volume,
          NULL);

      gst_element_set_state (media->audio_volume, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_converter, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_resampler, GST_STATE_PLAYING);
      gst_element_set_state (media->audio_filter, GST_STATE_PLAYING);

      /* the ghost pad keeps its own ref on the target; release the static
       * pad ref we obtained here (was leaked in the original) */
      filter_src_pad = gst_element_get_static_pad (media->audio_filter, "src");
      ghost_pad = gst_ghost_pad_new ("a_src", filter_src_pad);
      gst_object_unref (filter_src_pad);
      gst_pad_set_active (ghost_pad, TRUE);
      gst_element_add_pad (bin, ghost_pad);

      output_sink_pad = gst_element_get_request_pad (_mb_global_data.audio_mixer,
          "sink_%u");
      g_assert(output_sink_pad);

      media->audio_pad_name = gst_pad_get_name(output_sink_pad);
      g_debug ("audiomixer: new pad requested (%s)\n", media->audio_pad_name);

      ret = gst_pad_link (ghost_pad, output_sink_pad);
      if (GST_PAD_LINK_FAILED(ret))
      {
        return_code = FALSE;
        g_debug (" Could not link %s and audiomixer together.\n", media->name);
      }

      gst_object_unref (output_sink_pad);
    }
    /* unref on both outcomes of the pad link (the original leaked sink_pad
     * when the link failed) */
    gst_object_unref (sink_pad);
  }
  return return_code;
}
コード例 #16
0
/* Splice a new filter element (op->element) into the running pipeline.
 * Called with the pad blocked (or with the pipeline not playing); the
 * filter is wrapped in a bin between two audioconvert elements, inserted
 * just upstream of the "filteridentity" element, and the pad is unblocked
 * again if it was blocked. Consumes `op` in every path. */
static void
really_add_filter (GstPad *pad,
		   gboolean blocked,
		   RBGstPipelineOp *op)
{
	GstPad *binsinkpad;
	GstPad *binsrcpad;
	GstPad *realpad;
	GstPad *prevpad;
	GstElement *bin;
	GstElement *identity;
	GstElement *audioconvert;
	GstElement *audioconvert2;
	GstPadLinkReturn link;

	rb_debug ("adding filter %p", op->element);

	/*
	 * it kind of looks like we need audioconvert elements on either side of each filter
	 * to prevent caps renegotiation from causing 'internal data flow error' errors.
	 * this probably means we'd be doing a few unnecessary conversions when there are
	 * multiple filters in the pipeline, but at least it works.
	 */

	/* create containing bin */
	bin = gst_bin_new (NULL);
	audioconvert = gst_element_factory_make ("audioconvert", NULL);
	audioconvert2 = gst_element_factory_make ("audioconvert", NULL);
	gst_bin_add_many (GST_BIN (bin), audioconvert, op->element, audioconvert2, NULL);
	gst_element_link_many (audioconvert, op->element, audioconvert2, NULL);

	/* create ghost pads so the bin looks like a single element */
	realpad = gst_element_get_static_pad (audioconvert, "sink");
	binsinkpad = gst_ghost_pad_new ("sink", realpad);
	gst_element_add_pad (bin, binsinkpad);
	gst_object_unref (realpad);

	realpad = gst_element_get_static_pad (audioconvert2, "src");
	binsrcpad = gst_ghost_pad_new ("src", realpad);
	gst_element_add_pad (bin, binsrcpad);
	gst_object_unref (realpad);

	/* chuck it into the filter bin */
	gst_bin_add (GST_BIN (op->fixture), bin);
	/* find the insertion point: whatever currently feeds "filteridentity" */
	identity = gst_bin_get_by_name (GST_BIN (op->fixture), "filteridentity");
	realpad = gst_element_get_static_pad (identity, "sink");
	prevpad = gst_pad_get_peer (realpad);
	gst_object_unref (identity);

	/* break the old link and route: prevpad -> bin -> identity */
	gst_pad_unlink (prevpad, realpad);

	link = gst_pad_link (prevpad, binsinkpad);
	gst_object_unref (prevpad);
	if (link != GST_PAD_LINK_OK) {
		g_warning ("couldn't link new filter into pipeline (sink): %d", link);
		/* make some attempt at cleaning up; probably won't work though */
		gst_pad_link (prevpad, realpad);
		gst_object_unref (realpad);
		gst_bin_remove (GST_BIN (op->fixture), bin);
		gst_object_unref (bin);

		free_pipeline_op (op);
		return;
	}

	link = gst_pad_link (binsrcpad, realpad);
	gst_object_unref (realpad);
	if (link != GST_PAD_LINK_OK) {
		g_warning ("couldn't link new filter into pipeline (src): %d", link);
		/* doubt we can do anything helpful here.. */
	}

	/* if we're supposed to be playing, unblock the sink */
	if (blocked) {
		rb_debug ("unblocking pad after adding filter");
		gst_element_set_state (bin, GST_STATE_PLAYING);
		gst_pad_set_blocked_async (pad, FALSE, (GstPadBlockCallback)pipeline_op_done, NULL);
	} else {
		/* pipeline isn't running; just bring the new bin to PAUSED */
		gst_element_set_state (bin, GST_STATE_PAUSED);
	}

	/* tell listeners the filter is now live */
	_rb_player_gst_filter_emit_filter_inserted (RB_PLAYER_GST_FILTER (op->player), op->element);

	free_pipeline_op (op);
}
コード例 #17
0
/**
 * Start the component: build and start a GStreamer pipeline
 * (audiotestsrc -> audioresample -> audioconvert -> appsink) and push the
 * initial BULKIO SRI describing the 16-bit signed mono PCM stream.
 *
 * Fixes vs. previous revision:
 *  - the strstream caps string was never NUL-terminated (std::ends missing),
 *    so gst_caps_from_string() could read past the buffer;
 *  - strstream::str() freezes the internal buffer, which was leaked;
 *  - the AUDIO_TYPE keyword inserted the stream object into the CORBA Any
 *    instead of the formatted caps string.
 */
void AudioTestSource_i::start() throw (CF::Resource::StartError, CORBA::SystemException)
{
	AudioTestSource_base::start();

	LOG_DEBUG (AudioTestSource_i, "Initializing GStreamer Pipeline");
	pipeline = gst_pipeline_new ("audio-pipeline");

	bus = gst_pipeline_get_bus(reinterpret_cast<GstPipeline*>(pipeline));

	src      = gst_element_factory_make ("audiotestsrc",  "audio");
	resamp   = gst_element_factory_make ("audioresample",  "resampler");
	conv     = gst_element_factory_make ("audioconvert",  "input_converter");
	sink     = gst_element_factory_make ("appsink",  "bio_out");

	// Caps string for the raw audio format the appsink should receive.
	// std::ends is required: strstream does NOT NUL-terminate its buffer.
	std::strstream audio_type;
	audio_type << "audio/x-raw-int"
	           << ",channels=1"
	           << ",rate=" << sample_rate
	           << ",signed=(boolean)true"
	           << ",width=16"
	           << ",depth=16"
	           << ",endianness=1234"
	           << std::ends;

	// str() freezes the buffer; copy it out and un-freeze immediately so the
	// strstream destructor is allowed to release its storage (no leak).
	std::string audio_type_str(audio_type.str());
	audio_type.freeze(false);

	GstCaps *audio_caps = gst_caps_from_string (audio_type_str.c_str());
	g_object_set (sink, "caps", audio_caps, NULL);
	gst_caps_unref (audio_caps);

	g_object_set (sink, "emit-signals", TRUE, NULL);
	g_signal_connect (sink, "new-buffer", G_CALLBACK (AudioTestSource_i::_new_gst_buffer), this);

	_set_gst_src_param("*");
	_set_gst_resamp_param("*");

	gst_bin_add_many (GST_BIN (pipeline), src, resamp, conv, sink, NULL);

	if (!gst_element_link_many (src, resamp, conv, sink, NULL)) {
		LOG_WARN (AudioTestSource_i, "Failed to link elements!");
	}

	// Describe the outgoing stream: 1 sample per 1/sample_rate seconds,
	// real-valued (mode 0); block only when the source is not live.
	sri = BULKIO::StreamSRI();
	sri.hversion = 1;
	sri.xstart = 0.0;
	sri.xdelta = 1.0/sample_rate;
	sri.xunits = BULKIO::UNITS_TIME;
	sri.subsize = 0;
	sri.ystart = 0.0;
	sri.ydelta = 0.0;
	sri.yunits = BULKIO::UNITS_NONE;
	sri.mode = 0;
	sri.blocking = !is_live;
	sri.streamID = this->stream_id.c_str();

	sri.keywords.length(5);

	// New-style keyword for audio streams: the full caps string itself
	// (previously the strstream object was inserted by mistake).
	sri.keywords[0].id = CORBA::string_dup("AUDIO_TYPE");
	sri.keywords[0].value <<= audio_type_str.c_str();

	// Backwards compatibility
	sri.keywords[1].id = CORBA::string_dup("AUDIO_ENCODING");
	sri.keywords[1].value <<= "PCM_SIGNED";

	sri.keywords[2].id = CORBA::string_dup("AUDIO_CHANNELS");
	sri.keywords[2].value <<= 1;

	// 2 bytes per frame: 16-bit samples, one channel.
	sri.keywords[3].id = CORBA::string_dup("AUDIO_FRAME_SIZE");
	sri.keywords[3].value <<= 2;

	sri.keywords[4].id = CORBA::string_dup("AUDIO_FRAME_RATE");
	sri.keywords[4].value <<= static_cast<float>(sample_rate);

	audio_out->pushSRI(sri);

	LOG_DEBUG (AudioTestSource_i, "Starting GStreamer Pipeline");
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
コード例 #18
0
ファイル: cddasongloader.cpp プロジェクト: Aceler/Clementine
void CddaSongLoader::LoadSongs() {
  QMutexLocker locker(&mutex_load_);
  cdio_ = cdio_open(url_.path().toLocal8Bit().constData(), DRIVER_DEVICE);
  if (cdio_ == nullptr) {
    return;
  }
  // Create gstreamer cdda element
  GError* error = nullptr;
  cdda_ = gst_element_make_from_uri(GST_URI_SRC, "cdda://", nullptr, &error);
  if (error) {
    qLog(Error) << error->code << error->message;
  }
  if (cdda_ == nullptr) {
    return;
  }

  if (!url_.isEmpty()) {
    g_object_set(cdda_, "device", g_strdup(url_.path().toLocal8Bit().constData()),
                 nullptr);
  }
  if (g_object_class_find_property (G_OBJECT_GET_CLASS (cdda_), "paranoia-mode")) {
    g_object_set (cdda_, "paranoia-mode", 0, NULL);
  }

  // Change the element's state to ready and paused, to be able to query it
  if (gst_element_set_state(cdda_, GST_STATE_READY) ==
          GST_STATE_CHANGE_FAILURE ||
      gst_element_set_state(cdda_, GST_STATE_PAUSED) ==
          GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state(cdda_, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(cdda_));
    return;
  }

  // Get number of tracks
  GstFormat fmt = gst_format_get_by_nick("track");
  GstFormat out_fmt = fmt;
  gint64 num_tracks = 0;
  if (!gst_element_query_duration(cdda_, out_fmt, &num_tracks) ||
      out_fmt != fmt) {
    qLog(Error) << "Error while querying cdda GstElement";
    gst_object_unref(GST_OBJECT(cdda_));
    return;
  }

  SongList songs;
  for (int track_number = 1; track_number <= num_tracks; track_number++) {
    // Init song
    Song song;
    song.set_id(track_number);
    song.set_valid(true);
    song.set_filetype(Song::Type_Cdda);
    song.set_url(
        GetUrlFromTrack(track_number));
    song.set_title(QString("Track %1").arg(track_number));
    song.set_track(track_number);
    songs << song;
  }
  emit SongsLoaded(songs);


  gst_tag_register_musicbrainz_tags();

  GstElement* pipeline = gst_pipeline_new("pipeline");
  GstElement* sink = gst_element_factory_make ("fakesink", NULL);
  gst_bin_add_many (GST_BIN (pipeline), cdda_, sink, NULL);
  gst_element_link (cdda_, sink);
  gst_element_set_state(pipeline, GST_STATE_READY);
  gst_element_set_state(pipeline, GST_STATE_PAUSED);

  // Get TOC and TAG messages
  GstMessage* msg = nullptr;
  GstMessage* msg_toc = nullptr;
  GstMessage* msg_tag = nullptr;
  while ((msg = gst_bus_timed_pop_filtered(GST_ELEMENT_BUS(pipeline),
      GST_SECOND, (GstMessageType)(GST_MESSAGE_TOC | GST_MESSAGE_TAG)))) {
    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_TOC) {
      if (msg_toc) gst_message_unref(msg_toc); // Shouldn't happen, but just in case
      msg_toc = msg;
    } else if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_TAG) {
      if (msg_tag) gst_message_unref(msg_tag);
      msg_tag = msg;
    }
  }

  // Handle TOC message: get tracks duration
  if (msg_toc) {
    GstToc* toc;
    gst_message_parse_toc (msg_toc, &toc, nullptr);
    if (toc) {
      GList* entries = gst_toc_get_entries(toc);
      if (entries && songs.size() <= g_list_length (entries)) {
        int i = 0;
        for (GList* node = entries; node != nullptr; node = node->next) {
          GstTocEntry *entry = static_cast<GstTocEntry*>(node->data);
          quint64 duration = 0;
          gint64 start, stop;
          if (gst_toc_entry_get_start_stop_times (entry, &start, &stop))
            duration = stop - start;
          songs[i++].set_length_nanosec(duration);
        }
      }
    }
    gst_message_unref(msg_toc);
  }
  emit SongsDurationLoaded(songs);

  // Handle TAG message: generate MusicBrainz DiscId
  if (msg_tag) {
    GstTagList* tags = nullptr;
    gst_message_parse_tag(msg_tag, &tags);
    char* string_mb = nullptr;
    if (gst_tag_list_get_string(tags, GST_TAG_CDDA_MUSICBRAINZ_DISCID,
                                &string_mb)) {
      QString musicbrainz_discid(string_mb);
      qLog(Info) << "MusicBrainz discid: " << musicbrainz_discid;

      MusicBrainzClient* musicbrainz_client = new MusicBrainzClient;
      connect(musicbrainz_client, SIGNAL(Finished(const QString&, const QString&,
                                                  MusicBrainzClient::ResultList)),
              SLOT(AudioCDTagsLoaded(const QString&, const QString&,
                                     MusicBrainzClient::ResultList)));
      musicbrainz_client->StartDiscIdRequest(musicbrainz_discid);
      g_free(string_mb);
      gst_message_unref(msg_tag);
      gst_tag_list_free(tags);
    }
  }

  gst_element_set_state(pipeline, GST_STATE_NULL);
  // This will also cause cdda_ to be unref'd.
  gst_object_unref(pipeline);
}