Example #1
void 
_cmd_add_vpl()
{
    GstElement *vns;
    GstElement *tprobe;
    struct av_source *avs;

    avs = (struct av_source *)malloc(sizeof(struct av_source));

    vns = get_source_bin(NULL);

    gst_bin_add(GST_BIN(pipeline), vns);
    avs->vstruct = add_new_vstream_source(vns, 
            "vsrc", 
            "new video source");
    avs->astruct = add_new_stream_source(vns, 
            "asrc%d", 
            "new audio source");

    avs->astruct->pprobed = gst_element_get_static_pad(vns, "asrc%d");
    avs->astruct->ep_handler = gst_pad_add_event_probe(
            gst_element_get_static_pad(vns, "asrc%d"), 
            G_CALLBACK(cb_eos_in_videopl_astream), NULL);
    avs->vstruct->pprobed = gst_element_get_static_pad(vns, "vsrc");
    avs->vstruct->ep_handler = gst_pad_add_event_probe(
            gst_element_get_static_pad(vns, "vsrc"), 
            G_CALLBACK(cb_eos_in_videopl_vstream), avs);

    gst_element_set_state(vns, GST_STATE_PLAYING);
    g_print("numsinkpads gvideomixer - adder: %d <-> %d\n", 
            gvmixer->numsinkpads, adder->numsinkpads);

}
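The EOS callbacks wired up above (cb_eos_in_videopl_astream, cb_eos_in_videopl_vstream) are defined elsewhere in this project and not shown. As a rough orientation, a minimal GStreamer 0.10 event-probe callback of that kind could look like the following sketch (hypothetical body; returning TRUE lets the event continue, FALSE would drop it):

#include <gst/gst.h>

/* Hypothetical sketch of an EOS-handling event probe (GStreamer 0.10 API). */
static gboolean
cb_eos_sketch (GstPad *pad, GstEvent *event, gpointer user_data)
{
    if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
        g_print ("EOS reached on pad %s:%s\n", GST_DEBUG_PAD_NAME (pad));
        /* a real handler would typically schedule this source for removal */
    }
    return TRUE;    /* let the event pass downstream */
}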
Example #2
gboolean
scope_parser_attach_to_tee(ScopeParser *parser, GstElement *tee)
{
    GstPad *sink_pad;
    
    if(parser == NULL) {
        return FALSE;
    }
    
    parser->tee_pad = gst_element_get_request_pad(tee, "src1");
    if(parser->tee_pad == NULL) {
        g_warning("Could not get a source pad from the tee");
        return FALSE;
    }
    
    parser->fakesink = gst_element_factory_make("fakesink", "fakesink");
    
    if(parser->fakesink == NULL) {
        g_warning("Could not create fakesink element");
        return FALSE;
    }
    
    sink_pad = gst_element_get_pad(parser->fakesink, "sink");
    gst_pad_link(parser->tee_pad, sink_pad);
    
    parser->buffer_probe_id = gst_pad_add_buffer_probe(parser->tee_pad, 
        G_CALLBACK(scope_parser_buffer_probe), parser);
    parser->event_probe_id = gst_pad_add_event_probe(parser->tee_pad, 
        G_CALLBACK(scope_parser_event_probe), parser);
        
    gst_object_unref(parser->tee_pad);
    gst_object_unref(sink_pad);
    
    return TRUE;
}
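The two probe callbacks registered here (scope_parser_buffer_probe, scope_parser_event_probe) are not part of the snippet. In the 0.10 API a buffer probe receives the GstBuffer and an event probe the GstEvent; both return TRUE to keep data flowing. A minimal sketch with hypothetical bodies:

#include <gst/gst.h>

/* Hypothetical sketches; the real implementations are not shown above. */
static gboolean
scope_parser_buffer_probe (GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
    /* e.g. copy the raw samples out of the buffer for the scope display */
    return TRUE;
}

static gboolean
scope_parser_event_probe (GstPad *pad, GstEvent *event, gpointer user_data)
{
    /* e.g. watch for GST_EVENT_EOS or GST_EVENT_NEWSEGMENT */
    return TRUE;
}

The stored buffer_probe_id and event_probe_id would later be handed to gst_pad_remove_buffer_probe() and gst_pad_remove_event_probe() when detaching from the tee.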
Example #3
void 
_cmd_add_audio_file(const struct typed_command *lcom)
{
    GstElement *nfile;
    struct input_source *nsrc;
    GstPad *probepad;
    struct stat *finfo;

    finfo = (struct stat *)malloc(sizeof(struct stat));
    if(0 == stat(lcom->cmd_argv[1], finfo)){
        nfile = get_source_bin(lcom->cmd_argv[1]);
        gst_bin_add(GST_BIN(pipeline), nfile);
        probepad = gst_element_get_static_pad(nfile, "asrc%d");

        nsrc = add_new_stream_source(nfile, "asrc%d", 
                "additional file source");
        //nsrc->ep_handler = gst_pad_add_event_probe(
        nsrc->ep_handler = gst_pad_add_event_probe(probepad, 
                G_CALLBACK(cb_stop_single_file), nsrc);
        gst_element_set_state(nfile, GST_STATE_PLAYING);
        nsrc->pprobed = probepad;
    }
    else{
        g_printf("File does not exist!\n");
    }
    free(finfo);
}
Example #4
void test_buffer_probe_n_times()
{
  GstElement *pipeline, *fakesrc, *fakesink;
  GstBus *bus;
  GstMessage *message;
  GstPad *pad;
  xmlfile = "gstutils_test_buffer_probe_n_times";
  std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_buffer_probe_n_times");

  pipeline = gst_element_factory_make ("pipeline", NULL);
  fakesrc = gst_element_factory_make ("fakesrc", NULL);
  fakesink = gst_element_factory_make ("fakesink", NULL);

  g_object_set (fakesrc, "num-buffers", (int) 10, NULL);
  gst_bin_add_many (GST_BIN (pipeline), fakesrc, fakesink, NULL);
  gst_element_link (fakesrc, fakesink);

  pad = gst_element_get_pad (fakesink, "sink");
  gst_pad_add_data_probe (pad, G_CALLBACK (data_probe), SPECIAL_POINTER (0));
  gst_pad_add_buffer_probe (pad, G_CALLBACK (buffer_probe),
      SPECIAL_POINTER (1));
  gst_pad_add_event_probe (pad, G_CALLBACK (event_probe), SPECIAL_POINTER (2));
  gst_object_unref (pad);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  message = gst_bus_poll (bus, GST_MESSAGE_EOS, -1);
  gst_message_unref (message);
  gst_object_unref (bus);

  g_assert (n_buffer_probes == 10);     /* one for every buffer */
  g_assert (n_event_probes == 3);       /* new segment, latency and eos */
  g_assert (n_data_probes == 13);       /* duh */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* make sure nothing was sent in addition to the above when shutting down */
  g_assert (n_buffer_probes == 10);     /* one for every buffer */
  g_assert (n_event_probes == 3);       /* new segment, latency and eos */
  g_assert (n_data_probes == 13);       /* duh */
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
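The probe callbacks and counters asserted in this test live outside the snippet. A plausible minimal version, assuming the counter names used by the asserts (a data probe sees both buffers and events, which is why n_data_probes ends at 13 = 10 buffers + 3 events; the real test presumably also checks the SPECIAL_POINTER user data, omitted here):

#include <gst/gst.h>

static int n_data_probes = 0;
static int n_buffer_probes = 0;
static int n_event_probes = 0;

/* Data probes are called for every GstMiniObject (buffers and events). */
static gboolean
data_probe (GstPad *pad, GstMiniObject *obj, gpointer data)
{
  n_data_probes++;
  return TRUE;          /* TRUE = let the data pass */
}

static gboolean
buffer_probe (GstPad *pad, GstBuffer *buffer, gpointer data)
{
  n_buffer_probes++;
  return TRUE;
}

static gboolean
event_probe (GstPad *pad, GstEvent *event, gpointer data)
{
  n_event_probes++;
  return TRUE;
}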
Example #5
void 
_cmd_add_net(const char *url)
{
    GstElement *ns;
    GstElement *tprobe;
    struct input_source *net_in;

    ns = get_net_source_bin(url);
    /* this probes for EOS */
    tprobe = gst_bin_get_by_name(GST_BIN(ns), "netsrc");
    gst_bin_add(GST_BIN(pipeline), ns);

    net_in = add_new_stream_source(ns, "asrc%d", url );
    net_in->ep_handler = gst_pad_add_event_probe(
            gst_element_get_static_pad(tprobe,"src"), 
            G_CALLBACK(net_event), net_in);
    gst_element_set_state(ns, GST_STATE_PLAYING);	
    net_in->pprobed = gst_element_get_static_pad(tprobe, "src");
}
Example #6
void test_buffer_probe_once()
{
  GstElement *pipeline, *fakesrc, *fakesink;
  GstBus *bus;
  GstMessage *message;
  GstPad *pad;
  guint id1, id2, id3;
  
  xmlfile = "gstutils_test_buffer_probe_once";
  std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_buffer_probe_once");

  pipeline = gst_element_factory_make ("pipeline", NULL);
  fakesrc = gst_element_factory_make ("fakesrc", NULL);
  fakesink = gst_element_factory_make ("fakesink", NULL);

  g_object_set (fakesrc, "num-buffers", (int) 10, NULL);

  gst_bin_add_many (GST_BIN (pipeline), fakesrc, fakesink, NULL);
  gst_element_link (fakesrc, fakesink);

  pad = gst_element_get_pad (fakesink, "sink");
  id1 = gst_pad_add_data_probe (pad, G_CALLBACK (data_probe_once), &id1);
  id2 = gst_pad_add_buffer_probe (pad, G_CALLBACK (buffer_probe_once), &id2);
  id3 = gst_pad_add_event_probe (pad, G_CALLBACK (event_probe_once), &id3);
  gst_object_unref (pad);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  message = gst_bus_poll (bus, GST_MESSAGE_EOS, -1);
  gst_message_unref (message);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  g_assert (n_buffer_probes_once == 1); /* can we hit it and quit? */
  g_assert (n_event_probes_once == 1);  /* i said, can we hit it and quit? */
  g_assert (n_data_probes_once == 1);   /* let's hit it and quit!!! */
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
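Each probe here gets a pointer to its own handler id as user data, which suggests the *_once callbacks detach themselves after the first hit. A hedged sketch of that pattern for the buffer probe (the data and event variants would mirror it with gst_pad_remove_data_probe() / gst_pad_remove_event_probe()):

#include <gst/gst.h>

static int n_buffer_probes_once = 0;

/* One-shot probe: count the first buffer, then remove ourselves using the
 * handler id that the test passed in as user data. */
static gboolean
buffer_probe_once (GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
  guint *id = (guint *) user_data;

  n_buffer_probes_once++;
  gst_pad_remove_buffer_probe (pad, *id);
  return TRUE;
}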
Example #7
GstStreamInfo *
gst_stream_info_new (GstObject * object,
    GstStreamType type, const gchar * decoder, const GstCaps * caps)
{
  GstStreamInfo *info;

  info = g_object_new (GST_TYPE_STREAM_INFO, NULL);

  gst_object_ref (object);
  if (GST_IS_PAD (object)) {
    gst_pad_add_event_probe (GST_PAD_CAST (object),
        G_CALLBACK (cb_probe), info);
  }
  info->object = object;
  info->type = type;
  info->decoder = g_strdup (decoder);
  info->origin = object;
  if (caps) {
    info->caps = gst_caps_copy (caps);
  }

  return info;
}
Example #8
// this function handles the link with other elements
static gboolean
gst_haar_adjust_set_caps(GstPad *pad, GstCaps *caps)
{
    GstHaarAdjust *filter;
    GstPad        *other_pad;
    GstStructure  *structure;
    gint           width, height, depth;

    filter = GST_HAARADJUST(gst_pad_get_parent(pad));
    structure = gst_caps_get_structure(caps, 0);
    gst_structure_get_int(structure, "width",  &width);
    gst_structure_get_int(structure, "height", &height);
    gst_structure_get_int(structure, "depth",  &depth);

    filter->image   = cvCreateImage(cvSize(width, height), depth/3, 3);

    // add roi event probe on the sinkpad
    gst_pad_add_event_probe(filter->sinkpad, (GCallback) events_cb, filter);

    other_pad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
    gst_object_unref(filter);
    return gst_pad_set_caps(other_pad, caps);
}
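events_cb is registered on the sink pad but not shown; presumably it watches for a downstream event carrying the detected ROI. A hedged sketch of how such a probe might pick out a custom event by structure name (the "roi" name is purely an assumption):

#include <gst/gst.h>

/* Hypothetical ROI event probe: inspect custom downstream events, let
 * everything continue to flow. */
static gboolean
events_cb (GstPad *pad, GstEvent *event, gpointer user_data)
{
    const GstStructure *s;

    if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM) {
        s = gst_event_get_structure (event);
        if (s != NULL && gst_structure_has_name (s, "roi")) {
            /* read the region-of-interest coordinates from the structure */
        }
    }
    return TRUE;
}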
Example #9
static void
test_one_after_other_full (gboolean async)
{
  GstElement *pipeline;
  GstElement *comp, *sink, *source1, *source2;
  CollectStructure *collect;
  GstBus *bus;
  GstMessage *message;
  gboolean carry_on = TRUE;
  guint64 start, stop;
  gint64 duration;
  GstPad *sinkpad;

  pipeline = gst_pipeline_new ("test_pipeline");
  comp =
      gst_element_factory_make_or_warn ("gnlcomposition", "test_composition");
  fail_if (comp == NULL);

  /*
     Source 1
     Start : 0s
     Duration : 1s
     Media start : 5s
     Media Duration : 1s
     Priority : 1
   */
  source1 =
      videotest_gnl_src_full ("source1", 0, 1 * GST_SECOND, 5 * GST_SECOND,
      1 * GST_SECOND, 3, 1);
  fail_if (source1 == NULL);
  check_start_stop_duration (source1, 0, 1 * GST_SECOND, 1 * GST_SECOND);

  /*
     Source 2
     Start : 1s
     Duration : 1s
     Media start : 2s
     Media Duration : 1s
     Priority : 1
   */
  source2 = videotest_gnl_src_full ("source2", 1 * GST_SECOND, 1 * GST_SECOND,
      2 * GST_SECOND, 1 * GST_SECOND, 2, 1);
  fail_if (source2 == NULL);
  check_start_stop_duration (source2, 1 * GST_SECOND, 2 * GST_SECOND,
      1 * GST_SECOND);

  /* Add one source */

  DISABLE_ASYNC_UPDATE;
  gst_bin_add (GST_BIN (comp), source1);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 0, 1 * GST_SECOND, 1 * GST_SECOND);

  ASSERT_OBJECT_REFCOUNT (source1, "source1", 1);

  /* Second source */

  DISABLE_ASYNC_UPDATE;
  gst_bin_add (GST_BIN (comp), source2);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 0, 2 * GST_SECOND, 2 * GST_SECOND);

  ASSERT_OBJECT_REFCOUNT (source2, "source2", 1);

  /* Remove first source */

  gst_object_ref (source1);
  DISABLE_ASYNC_UPDATE;
  gst_bin_remove (GST_BIN (comp), source1);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 1 * GST_SECOND, 2 * GST_SECOND,
      1 * GST_SECOND);

  ASSERT_OBJECT_REFCOUNT (source1, "source1", 1);

  /* Re-add first source */

  DISABLE_ASYNC_UPDATE;
  gst_bin_add (GST_BIN (comp), source1);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 0, 2 * GST_SECOND, 2 * GST_SECOND);
  gst_object_unref (source1);

  ASSERT_OBJECT_REFCOUNT (source1, "source1", 1);

  sink = gst_element_factory_make_or_warn ("fakesink", "sink");
  fail_if (sink == NULL);

  gst_bin_add_many (GST_BIN (pipeline), comp, sink, NULL);

  /* Shared data */
  collect = g_new0 (CollectStructure, 1);
  collect->comp = comp;
  collect->sink = sink;

  /* Expected segments */
  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME, 5 * GST_SECOND, 6 * GST_SECOND, 0));
  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME,
          2 * GST_SECOND, 3 * GST_SECOND, 1 * GST_SECOND));

  g_signal_connect (G_OBJECT (comp), "pad-added",
      G_CALLBACK (composition_pad_added_cb), collect);

  sinkpad = gst_element_get_pad (sink, "sink");
  gst_pad_add_event_probe (sinkpad, G_CALLBACK (sinkpad_event_probe), collect);
  gst_pad_add_buffer_probe (sinkpad, G_CALLBACK (sinkpad_buffer_probe),
      collect);

  bus = gst_element_get_bus (GST_ELEMENT (pipeline));

  GST_DEBUG ("Setting pipeline to PLAYING");
  ASSERT_OBJECT_REFCOUNT (source1, "source1", 1);

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE);

  GST_DEBUG ("Let's poll the bus");

  while (carry_on) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
    if (message) {
      switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_EOS:
          /* we should check if we really finished here */
          GST_WARNING ("Got an EOS");
          carry_on = FALSE;
          break;
        case GST_MESSAGE_SEGMENT_START:
        case GST_MESSAGE_SEGMENT_DONE:
          /* We shouldn't see any segment messages, since we didn't do a segment seek */
          GST_WARNING ("Saw a Segment start/stop");
          fail_if (TRUE);
          break;
        case GST_MESSAGE_ERROR:
          GST_WARNING ("Saw an ERROR");
          fail_if (TRUE);
        default:
          break;
      }
      gst_mini_object_unref (GST_MINI_OBJECT (message));
    }
  }

  GST_DEBUG ("Setting pipeline to NULL");

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_READY) == GST_STATE_CHANGE_FAILURE);

  fail_if (collect->expected_segments != NULL);

  GST_DEBUG ("Resetted pipeline to READY");

  /* Expected segments */
  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME, 5 * GST_SECOND, 6 * GST_SECOND, 0));
  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME,
          2 * GST_SECOND, 3 * GST_SECOND, 1 * GST_SECOND));
  collect->gotsegment = FALSE;


  GST_DEBUG ("Setting pipeline to PLAYING again");

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE);

  carry_on = TRUE;

  GST_DEBUG ("Let's poll the bus AGAIN");

  while (carry_on) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
    if (message) {
      switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_EOS:
          /* we should check if we really finished here */
          carry_on = FALSE;
          break;
        case GST_MESSAGE_SEGMENT_START:
        case GST_MESSAGE_SEGMENT_DONE:
          /* We shouldn't see any segment messages, since we didn't do a segment seek */
          GST_WARNING ("Saw a Segment start/stop");
          fail_if (TRUE);
          break;
        case GST_MESSAGE_ERROR:
          GST_ERROR ("Saw an ERROR");
          fail_if (TRUE);
        default:
          break;
      }
      gst_mini_object_unref (GST_MINI_OBJECT (message));
    } else {
      GST_DEBUG ("bus_poll responded, but there wasn't any message...");
    }
  }

  fail_if (collect->expected_segments != NULL);

  gst_object_unref (GST_OBJECT (sinkpad));

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE);

  ASSERT_OBJECT_REFCOUNT_BETWEEN (pipeline, "main pipeline", 1, 2);
  gst_object_unref (pipeline);
  ASSERT_OBJECT_REFCOUNT_BETWEEN (bus, "main bus", 1, 2);
  gst_object_unref (bus);

  g_free (collect);
}
Example #10
static void
test_one_under_another_full (gboolean async)
{
  GstElement *pipeline;
  GstElement *comp, *sink, *source1, *source2;
  CollectStructure *collect;
  GstBus *bus;
  GstMessage *message;
  gboolean carry_on = TRUE;
  guint64 start, stop;
  gint64 duration;
  GstPad *sinkpad;

  pipeline = gst_pipeline_new ("test_pipeline");
  comp =
      gst_element_factory_make_or_warn ("gnlcomposition", "test_composition");
  fail_if (comp == NULL);

  /*
     Source 1
     Start : 0s
     Duration : 2s
     Priority : 1
   */
  source1 = videotest_gnl_src ("source1", 0, 2 * GST_SECOND, 3, 1);
  fail_if (source1 == NULL);
  check_start_stop_duration (source1, 0, 2 * GST_SECOND, 2 * GST_SECOND);

  /*
     Source 2
     Start : 1s
     Duration : 2s
     Priority : 2
   */
  source2 = videotest_gnl_src ("source2", 1 * GST_SECOND, 2 * GST_SECOND, 2, 2);
  fail_if (source2 == NULL);
  check_start_stop_duration (source2, 1 * GST_SECOND, 3 * GST_SECOND,
      2 * GST_SECOND);

  /* Add two sources */

  DISABLE_ASYNC_UPDATE;
  gst_bin_add (GST_BIN (comp), source1);
  gst_bin_add (GST_BIN (comp), source2);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 0, 3 * GST_SECOND, 3 * GST_SECOND);

  /* Remove first source */

  gst_object_ref (source1);
  DISABLE_ASYNC_UPDATE;
  gst_bin_remove (GST_BIN (comp), source1);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 1 * GST_SECOND, 3 * GST_SECOND,
      2 * GST_SECOND);

  /* Re-add first source */

  DISABLE_ASYNC_UPDATE;
  gst_bin_add (GST_BIN (comp), source1);
  ENABLE_ASYNC_UPDATE;
  check_start_stop_duration (comp, 0, 3 * GST_SECOND, 3 * GST_SECOND);
  gst_object_unref (source1);

  sink = gst_element_factory_make_or_warn ("fakesink", "sink");
  fail_if (sink == NULL);

  gst_bin_add_many (GST_BIN (pipeline), comp, sink, NULL);

  /* Shared data */
  collect = g_new0 (CollectStructure, 1);
  collect->comp = comp;
  collect->sink = sink;

  /* Expected segments */
  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME, 0, GST_SECOND, 0));

  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME, GST_SECOND, 2 * GST_SECOND,
          GST_SECOND));

  collect->expected_segments = g_list_append (collect->expected_segments,
      segment_new (1.0, GST_FORMAT_TIME,
          2 * GST_SECOND, 3 * GST_SECOND, 2 * GST_SECOND));

  g_signal_connect (G_OBJECT (comp), "pad-added",
      G_CALLBACK (composition_pad_added_cb), collect);

  sinkpad = gst_element_get_pad (sink, "sink");
  gst_pad_add_event_probe (sinkpad, G_CALLBACK (sinkpad_event_probe), collect);
  gst_pad_add_buffer_probe (sinkpad, G_CALLBACK (sinkpad_buffer_probe),
      collect);

  bus = gst_element_get_bus (GST_ELEMENT (pipeline));

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE);

  while (carry_on) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
    if (message) {
      switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_EOS:
          /* we should check if we really finished here */
          carry_on = FALSE;
          break;
        case GST_MESSAGE_SEGMENT_START:
        case GST_MESSAGE_SEGMENT_DONE:
          /* check if the segment is the correct one (0s-4s) */
          carry_on = FALSE;
          break;
        case GST_MESSAGE_ERROR:
          fail_if (TRUE);
        default:
          break;
      }
      gst_message_unref (message);
    }
  }

  fail_if (collect->expected_segments != NULL);

  fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE);

  gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
  gst_object_unref (GST_OBJECT (sinkpad));
  ASSERT_OBJECT_REFCOUNT_BETWEEN (pipeline, "main pipeline", 1, 2);
  gst_object_unref (pipeline);
  ASSERT_OBJECT_REFCOUNT_BETWEEN (bus, "main bus", 1, 2);
  gst_object_unref (bus);

  g_free (collect);
}
Example #11
void
_bp_vis_pipeline_setup (BansheePlayer *player)
{
    // The basic pipeline we're constructing is:
    // .audiotee ! queue ! audioresample ! audioconvert ! fakesink

    GstElement *fakesink, *converter, *resampler, *audiosinkqueue;
    GstCaps *caps;
    GstPad *teepad;
    GstPad *pad;

    player->vis_buffer = NULL;
    player->vis_fft = gst_fft_f32_new (SLICE_SIZE * 2, FALSE);
    player->vis_fft_buffer = g_new (GstFFTF32Complex, SLICE_SIZE + 1);
    player->vis_fft_sample_buffer = g_new0 (gfloat, SLICE_SIZE);
    
    // Core elements, if something fails here, it's the end of the world
    audiosinkqueue = gst_element_factory_make ("queue", "vis-queue");

    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    gst_pad_add_event_probe (pad, G_CALLBACK (_bp_vis_pipeline_event_probe), player);
    gst_object_unref (GST_OBJECT (pad));

    resampler = gst_element_factory_make ("audioresample", "vis-resample");
    converter = gst_element_factory_make ("audioconvert", "vis-convert");
    fakesink = gst_element_factory_make ("fakesink", "vis-sink");

    if (audiosinkqueue == NULL || resampler == NULL || converter == NULL || fakesink == NULL) {
        bp_debug ("Could not construct visualization pipeline, a fundamental element could not be created");
        return;
    }

    // Keep around the 5 most recent seconds of audio so that when resuming
    // visualization we have something to show right away.
    g_object_set (G_OBJECT (audiosinkqueue),
            "leaky", 2,
            "max-size-buffers", 0,
            "max-size-bytes", 0,
            "max-size-time", GST_SECOND * 5,
            NULL);
    
    g_signal_connect (G_OBJECT (fakesink), "handoff", G_CALLBACK (bp_vis_pcm_handoff), player);

    g_object_set (G_OBJECT (fakesink),
            // This enables the handoff signal.
            "signal-handoffs", TRUE,
            // Synchronize so we see vis at the same time as we hear it.
            "sync", TRUE,
            // Drop buffers if they come in too late.  This is mainly used when
            // thawing the vis pipeline.
            "max-lateness", GST_SECOND / 120,
            // Deliver buffers one frame early.  This allows for rendering
            // time.  (TODO: It would be great to calculate this on-the-fly so
            // we match the rendering time.)
            "ts-offset", -GST_SECOND / 60,
            // Don't go to PAUSED when we freeze the pipeline.
            "async", FALSE, NULL);
    
    gst_bin_add_many (GST_BIN (player->audiobin), audiosinkqueue, resampler,
                      converter, fakesink, NULL);
    
    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    teepad = gst_element_get_request_pad (player->audiotee, "src%d");
    gst_pad_link (teepad, pad);
    gst_object_unref (GST_OBJECT (teepad));
    gst_object_unref (GST_OBJECT (pad));
    
    gst_element_link_many (audiosinkqueue, resampler, converter, NULL);
    
    caps = gst_static_caps_get (&vis_data_sink_caps);
    gst_element_link_filtered (converter, fakesink, caps);
    gst_caps_unref (caps);
    
    player->vis_buffer = gst_adapter_new ();
    player->vis_resampler = resampler;
    player->vis_thawing = FALSE;
    player->vis_enabled = FALSE;

    // Disable the pipeline till we hear otherwise from managed land.
    _bp_vis_pipeline_set_blocked (player, TRUE);
}
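_bp_vis_pipeline_event_probe is defined elsewhere in Banshee; the sketch below only illustrates the general shape such a probe could take (which events to react to, and what to do about them, are assumptions):

#include <gst/gst.h>

/* Hypothetical sketch: on flush or new segment the PCM accumulated for the
 * visualization would be stale, so a real handler would likely reset its
 * adapter/sample buffers here. */
static gboolean
_bp_vis_pipeline_event_probe_sketch (GstPad *pad, GstEvent *event, gpointer data)
{
    switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_FLUSH_START:
        case GST_EVENT_NEWSEGMENT:
            /* e.g. gst_adapter_clear (player->vis_buffer); */
            break;
        default:
            break;
    }
    return TRUE;
}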
Example #12
bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&
      !device_.toString().isEmpty()) {
    switch (device_.type()) {
      case QVariant::Int:
        g_object_set(G_OBJECT(audiosink_),
                     "device", device_.toInt(),
                     nullptr);
        break;
      case QVariant::String:
        g_object_set(G_OBJECT(audiosink_),
                     "device", device_.toString().toUtf8().constData(),
                     nullptr);
        break;

      #ifdef Q_OS_WIN32
      case QVariant::ByteArray: {
        GUID guid = QUuid(device_.toByteArray());
        g_object_set(G_OBJECT(audiosink_),
                     "device", &guid,
                     nullptr);
        break;
      }
      #endif  // Q_OS_WIN32

      default:
        qLog(Warning) << "Unknown device type" << device_;
        break;
    }
  }

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add an event probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualizations are not affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Set the equalizer bands
  g_object_set(G_OBJECT(equalizer_), "num-bands", 10, nullptr);

  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    GstObject* band =
        gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i);

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }

  gst_element_link(queue_, audioconvert_);

  // Create the caps to put in each path in the tee.  The scope path gets 16-bit
  // ints and the audiosink path gets float32.
  GstCaps* caps16 =
      gst_caps_new_simple("audio/x-raw-int", "width", G_TYPE_INT, 16, "signed",
                          G_TYPE_BOOLEAN, true, nullptr);
  GstCaps* caps32 = gst_caps_new_simple("audio/x-raw-float", "width",
                                        G_TYPE_INT, 32, nullptr);
  if (mono_playback_) {
    gst_caps_set_simple(caps32, "channels", G_TYPE_INT, 1, nullptr);
  }

  // Link the elements with special caps
  gst_element_link_filtered(probe_converter, probe_sink, caps16);
  gst_element_link_filtered(audioconvert_, convert_sink, caps32);
  gst_caps_unref(caps16);
  gst_caps_unref(caps32);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(audio_queue, "sink"));

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link everything else.
  gst_element_link(probe_queue, probe_converter);
  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        audiosink_, nullptr);

  // Add probes and handlers.
  gst_pad_add_buffer_probe(gst_element_get_static_pad(probe_converter, "src"),
                           G_CALLBACK(HandoffCallback), this);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
Example #13
static void 
papaya_video_tree_build()
{
    GstElement *apq, *vpq;
    GstElement *avorb;
    GstElement *vshoutsend;
    GstElement *vtheora;
    GstElement *cbprobe;
    GstElement *generalout;

    globconf_t *conf = parsed_conf;

    struct av_source *av;
    /* video playlist, video live, xorg live
     * streams are handled here 
     */ 	
    if(STREAM_TYPE == VIDEO_PLAYLIST ||
       STREAM_TYPE == DVB_LIVE ||
       STREAM_TYPE == DV1394_LIVE)
        srcbin = get_source_bin(NULL);
    else if(STREAM_TYPE == VIDEO_LIVE)
        srcbin = get_av_live_source_bin();
    else if(STREAM_TYPE == XORG_LIVE)	
        srcbin = get_x_source_bin();

    /* add the source bin to the pipeline
     */ 
    CHECK(gst_bin_add(GST_BIN(pipeline), srcbin));

    /* in case of video streaming, audio is encoded in 
     * vorbis format and video in theora format
     * TODO: for both video and audio we should have a tee element
     * to which monitors can be attached
     */
    gvmixer = gst_element_factory_make("videomixer", "mainvmixer");
    CHECK(gst_bin_add(GST_BIN(pipeline), gvmixer));

    adderbin = get_adder_bin();	
    CHECK(gst_bin_add(GST_BIN(pipeline), adderbin));

    
    if(STREAM_TYPE == VIDEO_PLAYLIST)
    {
        GstElement *tvid;
        tvid = get_videotest_bin();
        gst_bin_add(GST_BIN(pipeline), tvid);
        add_new_vstream_source(tvid, "tsrc", "testvid");
    }

    av = (struct av_source *)malloc(sizeof(struct av_source));
    av->vstruct = add_new_vstream_source(srcbin, "vsrc", "mainvideo");
    av->astruct = add_new_stream_source(srcbin, "asrc%d", "mainaudio");
    gst_pad_add_event_probe(gst_element_get_static_pad(srcbin, "asrc%d"),
            G_CALLBACK(cb_eos_in_videopl_astream), NULL);
    cbprobe = gst_bin_get_by_name(GST_BIN(srcbin), "fdecode");
    gst_pad_add_event_probe(gst_element_get_static_pad(srcbin, "vsrc"), 
            G_CALLBACK(cb_eos_in_videopl_vstream), av);
#ifdef _DEBUG_
    g_print("mainvid: %s", av->vstruct->req_pad_name);
#endif
    apq = gst_element_factory_make("queue2", NULL);
    gst_bin_add(GST_BIN(pipeline), apq);
    gst_element_link(adderbin, apq);

    vpq = gst_element_factory_make("queue2", NULL);
    gst_bin_add(GST_BIN(pipeline), vpq);
    gst_element_link(gvmixer, vpq);

    if(conf->serving_mode == ICECAST_SERV_MODE){
        //FIXME: quite horrid audio chain here must be fixed
        avorb = get_vorbis_enc();
        gst_bin_add(GST_BIN(pipeline), avorb);
        gst_element_link(apq, avorb);

        vtheora = get_theora_bin();
        gst_bin_add(GST_BIN(pipeline), vtheora);
        gst_element_link(vpq, vtheora);

        /* the following element is a bin made of a 
         * oggmux element and the shout2send element */	
        vshoutsend = get_shout_video_out();
        gst_bin_add(GST_BIN(pipeline), vshoutsend);

        gst_pad_link(gst_element_get_static_pad(avorb, "src"),
                gst_element_get_static_pad(vshoutsend, "sink_0"));

        gst_pad_link(gst_element_get_static_pad(vtheora, "src"),
                gst_element_get_static_pad(vshoutsend, "sink_1"));
    }
    else if (conf->serving_mode == RTP_SERV_MODE){

        generalout = get_rtp_output_pipe();
        gst_bin_add(GST_BIN(pipeline), generalout);

        gst_pad_link(gst_element_get_static_pad(apq, "src"),
                    gst_element_get_static_pad(generalout, "asink"));
        gst_pad_link(gst_element_get_static_pad(vpq, "src"),
                gst_element_get_static_pad(generalout, "vsink"));

    }
}
Example #14
void 
papaya_audio_tree_build()
{
    GstElement *audio;
    GstElement *sout;

    GstPad *prevpad;
    struct input_source *in_src;
    /* 
     * we handle audio chains for audio live and 
     * playlist here
     */
    switch (STREAM_TYPE){
        case AUDIO_PLAYLIST:
            srcbin = get_source_bin(NULL);
            break;
        case AUDIO_LIVE:
            srcbin = get_alsa_source_bin();
            break;
    }

    gst_bin_add(GST_BIN (pipeline), srcbin);

    silence = get_silence_bin();
    gst_bin_add(GST_BIN(pipeline), silence);

    /* 
     * this is the new code to add sources, link them
     * to the adder and register them in a list of sources
     */ 	
    adderbin = get_adder_bin();	
    gst_bin_add(GST_BIN(pipeline), adderbin);

    /* add sources */
    add_new_stream_source(silence, "src", "main silence source");
    in_src = add_new_stream_source(srcbin, 
            "asrc%d", "bootstrap playlist source");
    /* this is used for the playlist EOS handling */
    gst_pad_add_event_probe(gst_element_get_static_pad(srcbin,"asrc%d"), 
            G_CALLBACK(cb_eos_in_filesrc), in_src);

    g_print("active sources %d\n", globsrc->active);

    atee = gst_element_factory_make("tee","tee");
    gst_bin_add(GST_BIN(pipeline),atee);
    gst_element_link(adderbin, atee);

    /*
     * build output is the encoders and converter chain
     */
    audio = build_audio_output_chain();	
    gst_bin_add (GST_BIN (pipeline), audio);

    /* the shout2send element */
    sout = get_shout_out();
    gst_bin_add(GST_BIN(pipeline), sout);
    gst_element_link(audio, sout);


    add_new_outputs(audio, "sink", "a shout2send output");

    /* activate the audio monitor if requested */
    if(audiomon)
        pass_on();	
}
Example #15
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }
  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
Example #16
static void
uridecodebin_pad_added_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  PrivateStream *ps;
  GstPad *sinkpad = NULL;
  GstCaps *caps;
  static GstCaps *subs_caps = NULL;

  if (!subs_caps) {
    subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
        "subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
        "application/x-ssa; application/x-ass; subtitle/x-kate; "
        "video/x-dvd-subpicture; ");
  }

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  ps = g_slice_new0 (PrivateStream);

  ps->dc = dc;
  ps->pad = pad;
  ps->queue = gst_element_factory_make ("queue", NULL);
  ps->sink = gst_element_factory_make ("fakesink", NULL);

  if (G_UNLIKELY (ps->queue == NULL || ps->sink == NULL))
    goto error;

  g_object_set (ps->sink, "silent", TRUE, NULL);
  g_object_set (ps->queue, "max-size-buffers", 1, "silent", TRUE, NULL);

  caps = gst_pad_get_caps_reffed (pad);

  if (gst_caps_can_intersect (caps, subs_caps)) {
    /* Subtitle streams are sparse and don't provide any information - don't
     * wait for data to preroll */
    g_object_set (ps->sink, "async", FALSE, NULL);
  }

  gst_caps_unref (caps);

  gst_bin_add_many (dc->priv->pipeline, ps->queue, ps->sink, NULL);

  if (!gst_element_link_pads_full (ps->queue, "src", ps->sink, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->sink))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->queue))
    goto error;

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  if (sinkpad == NULL)
    goto error;
  if (gst_pad_link_full (pad, sinkpad,
          GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)
    goto error;
  gst_object_unref (sinkpad);

  /* Add an event probe */
  gst_pad_add_event_probe (pad, G_CALLBACK (_event_probe), ps);

  DISCO_LOCK (dc);
  dc->priv->streams = g_list_append (dc->priv->streams, ps);
  DISCO_UNLOCK (dc);

  GST_DEBUG_OBJECT (dc, "Done handling pad");

  return;

error:
  GST_ERROR_OBJECT (dc, "Error while handling pad");
  if (sinkpad)
    gst_object_unref (sinkpad);
  if (ps->queue)
    gst_object_unref (ps->queue);
  if (ps->sink)
    gst_object_unref (ps->sink);
  g_slice_free (PrivateStream, ps);
  return;
}
Example #17
/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! capsfilter ! crop ! scale ! capsfilter ! tee name=t !
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE, if elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  gboolean ret = FALSE;
  GstElement *videoscale;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;

  if (self->elements_created)
    return TRUE;

  GST_DEBUG_OBJECT (self, "constructing pipeline");

  /* Add application set or default video src element */
  if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,
              self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC))) {
    self->src_vid_src = NULL;
    goto done;
  } else {
    if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {
      goto done;
    }
  }
  /* we lost the reference */
  self->app_vid_src = NULL;

  /* add an event probe to the src element to drop EOS from READY->NULL */
  {
    GstPad *pad;
    pad = gst_element_get_static_pad (self->src_vid_src, "src");

    self->src_event_probe_id = gst_pad_add_event_probe (pad,
        (GCallback) src_event_probe, self);
    gst_object_unref (pad);
  }

  if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace"))
    goto done;

  if (!(self->src_filter =
          gst_camerabin_create_and_add_element (cbin, "capsfilter")))
    goto done;

  if (!(self->src_zoom_crop =
          gst_camerabin_create_and_add_element (cbin, "videocrop")))
    goto done;
  if (!(self->src_zoom_scale =
          gst_camerabin_create_and_add_element (cbin, "videoscale")))
    goto done;
  if (!(self->src_zoom_filter =
          gst_camerabin_create_and_add_element (cbin, "capsfilter")))
    goto done;

  if (!(tee = gst_camerabin_create_and_add_element (cbin, "tee")))
    goto done;

  /* viewfinder pad */
  vf_pad = gst_element_get_request_pad (tee, "src%d");
  g_object_set (tee, "alloc-pad", vf_pad, NULL);
  gst_object_unref (vf_pad);

  /* the viewfinder should always work, so we add some converters to it */
  if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace"))
    goto done;
  if (!(videoscale = gst_camerabin_create_and_add_element (cbin, "videoscale")))
    goto done;

  /* image/video pad from tee */
  tee_capture_pad = gst_element_get_request_pad (tee, "src%d");

  self->output_selector =
      gst_element_factory_make ("output-selector", "outsel");
  gst_bin_add (GST_BIN (self), self->output_selector);
  {
    GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

    /* check return TODO */
    gst_pad_link (tee_capture_pad, pad);
    gst_object_unref (pad);
  }
  gst_object_unref (tee_capture_pad);

  /* Create the 2 output pads for video and image */
  self->outsel_vidpad =
      gst_element_get_request_pad (self->output_selector, "src%d");
  self->outsel_imgpad =
      gst_element_get_request_pad (self->output_selector, "src%d");

  g_assert (self->outsel_vidpad != NULL);
  g_assert (self->outsel_imgpad != NULL);

  gst_pad_add_buffer_probe (self->outsel_imgpad,
      G_CALLBACK (gst_wrapper_camera_bin_src_imgsrc_probe), self);
  gst_pad_add_buffer_probe (self->outsel_vidpad,
      G_CALLBACK (gst_wrapper_camera_bin_src_vidsrc_probe), self);
  gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc), self->outsel_imgpad);
  gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), self->outsel_vidpad);

  if (bcamsrc->mode == MODE_IMAGE) {
    g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
        NULL);
  } else {
    g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
        NULL);
  }

  /* hook-up the vf ghostpad */
  vf_pad = gst_element_get_static_pad (videoscale, "src");
  gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
  gst_object_unref (vf_pad);

  gst_pad_set_active (self->vfsrc, TRUE);
  gst_pad_set_active (self->imgsrc, TRUE);      /* XXX ??? */
  gst_pad_set_active (self->vidsrc, TRUE);      /* XXX ??? */

  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}
Example #18
static void
on_source1_pad_added_cb (GstElement * source, GstPad * pad, gpointer user_data)
{
  gst_pad_add_event_probe (pad, G_CALLBACK (on_source1_pad_event_cb), NULL);
}
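on_source1_pad_event_cb is not shown; since the probe is attached with NULL user data, a minimal pass-through handler is likely all that is needed. A sketch:

#include <gst/gst.h>

/* Minimal sketch: log every event seen on source1's new pad and pass it on. */
static gboolean
on_source1_pad_event_cb (GstPad *pad, GstEvent *event, gpointer user_data)
{
  g_print ("event %s on %s:%s\n", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (pad));
  return TRUE;
}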
Example #19
QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
    :QObject(parent),
     m_state(QMediaPlayer::StoppedState),
     m_pendingState(QMediaPlayer::StoppedState),
     m_busHelper(0),
     m_playbin(0),
     m_usePlaybin2(false),
     m_videoSink(0),
     m_pendingVideoSink(0),
     m_nullVideoSink(0),
     m_bus(0),
     m_videoOutput(0),
     m_renderer(0),
     m_volume(100),
     m_playbackRate(1.0),
     m_muted(false),
     m_audioAvailable(false),
     m_videoAvailable(false),
     m_seekable(false),
     m_lastPosition(0),
     m_duration(-1)
{
#ifdef USE_PLAYBIN2
    m_playbin = gst_element_factory_make("playbin2", NULL);
#endif

    if (m_playbin) {
        m_usePlaybin2 = true;

        //GST_PLAY_FLAG_NATIVE_VIDEO omits configuration of ffmpegcolorspace and videoscale,
        //since those elements are included in the video output bin.
        int flags = 0;
        g_object_get(G_OBJECT(m_playbin), "flags", &flags, NULL);
        flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
        g_object_set(G_OBJECT(m_playbin), "flags", flags, NULL);
    } else {
        m_usePlaybin2 = false;
        m_playbin = gst_element_factory_make("playbin", NULL);
    }

    m_videoOutputBin = gst_bin_new("video-output-bin");
    gst_object_ref(GST_OBJECT(m_videoOutputBin));

    m_videoIdentity = gst_element_factory_make("identity", "identity-vo");
    m_nullVideoSink = gst_element_factory_make("fakesink", NULL);
    gst_object_ref(GST_OBJECT(m_nullVideoSink));
    gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, NULL);
    gst_element_link_many(m_videoIdentity, m_nullVideoSink, NULL);

    //add an event probe before video output to save and repost segment events
    {
        gst_segment_init (&m_segment, GST_FORMAT_TIME);

        GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
        gst_pad_add_event_probe(srcPad, G_CALLBACK(new_segment_probe), this);
        gst_pad_add_buffer_probe(srcPad, G_CALLBACK(new_buffer_probe), this);
        gst_object_unref(GST_OBJECT(srcPad));
    }


    m_videoSink = m_nullVideoSink;

    // add ghostpads
    GstPad *pad = gst_element_get_static_pad(m_videoIdentity,"sink");
    gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), gst_ghost_pad_new("videosink", pad));
    gst_object_unref(GST_OBJECT(pad));

    if (m_playbin != 0) {
        // Sort out messages
        m_bus = gst_element_get_bus(m_playbin);
        m_busHelper = new QGstreamerBusHelper(m_bus, this);
        connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage)));
        m_busHelper->installSyncEventFilter(this);

        g_object_set(G_OBJECT(m_playbin), "video-sink", m_videoOutputBin, NULL);

        g_signal_connect(G_OBJECT(m_playbin), "notify::source", G_CALLBACK(playbinNotifySource), this);

        // Initial volume
        double volume = 1.0;
        g_object_get(G_OBJECT(m_playbin), "volume", &volume, NULL);
        m_volume = int(volume*100);
    }
}
Example #20
/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! \
 * capsfilter ! tee name=t
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE, if elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  GstElement *filter_csp;
  GstElement *src_csp;
  GstElement *capsfilter;
  gboolean ret = FALSE;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;
  GstPad *src_caps_src_pad;

  if (!self->elements_created) {

    GST_DEBUG_OBJECT (self, "constructing pipeline");

    /* Add application set or default video src element */
    if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,
                self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC,
                "camerasrc-real-src"))) {
      self->src_vid_src = NULL;
      goto done;
    } else {
      if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {
        goto done;
      }
    }
    /* we lost the reference */
    self->app_vid_src = NULL;

    /* we listen for changes to max-zoom in the video src so that
     * we can proxy them to the basecamerasrc property */
    if (g_object_class_find_property (G_OBJECT_GET_CLASS (bcamsrc), "max-zoom")) {
      g_signal_connect (G_OBJECT (self->src_vid_src), "notify::max-zoom",
          (GCallback) gst_wrapper_camera_bin_src_max_zoom_cb, bcamsrc);
    }

    /* add an event probe to the src element to drop EOS from READY->NULL */
    {
      GstPad *pad;
      pad = gst_element_get_static_pad (self->src_vid_src, "src");

      self->src_event_probe_id = gst_pad_add_event_probe (pad,
          (GCallback) gst_wrapper_camera_src_src_event_probe, self);
      gst_object_unref (pad);
    }

    if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",
            "src-colorspace"))
      goto done;

    if (!(self->src_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "src-capsfilter")))
      goto done;

    /* attach to notify::caps on the first capsfilter and use a callback
     * to recalculate the zoom properties when these caps change and to
     * propagate the caps to the second capsfilter */
    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
    g_signal_connect (src_caps_src_pad, "notify::caps",
        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);
    gst_object_unref (src_caps_src_pad);

    if (!(self->src_zoom_crop =
            gst_camerabin_create_and_add_element (cbin, "videocrop",
                "zoom-crop")))
      goto done;
    if (!(self->src_zoom_scale =
            gst_camerabin_create_and_add_element (cbin, "videoscale",
                "zoom-scale")))
      goto done;
    if (!(self->src_zoom_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "zoom-capsfilter")))
      goto done;

    if (!(tee =
            gst_camerabin_create_and_add_element (cbin, "tee",
                "camerasrc-tee")))
      goto done;

    /* viewfinder pad */
    vf_pad = gst_element_get_request_pad (tee, "src%d");
    g_object_set (tee, "alloc-pad", vf_pad, NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
    gst_object_unref (vf_pad);

    /* image/video pad from tee */
    tee_capture_pad = gst_element_get_request_pad (tee, "src%d");

    self->output_selector =
        gst_element_factory_make ("output-selector", "outsel");
    g_object_set (self->output_selector, "pad-negotiation-mode", 0, NULL);
    gst_bin_add (GST_BIN (self), self->output_selector);
    {
      GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

      /* check return TODO */
      gst_pad_link (tee_capture_pad, pad);
      gst_object_unref (pad);
    }
    gst_object_unref (tee_capture_pad);

    /* Create the 2 output pads for video and image */
    self->outsel_vidpad =
        gst_element_get_request_pad (self->output_selector, "src%d");
    self->outsel_imgpad =
        gst_element_get_request_pad (self->output_selector, "src%d");

    g_assert (self->outsel_vidpad != NULL);
    g_assert (self->outsel_imgpad != NULL);

    gst_pad_add_buffer_probe (self->outsel_imgpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_imgsrc_probe), self);
    gst_pad_add_buffer_probe (self->outsel_vidpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_vidsrc_probe), self);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc),
        self->outsel_imgpad);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc),
        self->outsel_vidpad);

    if (bcamsrc->mode == MODE_IMAGE) {
      g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
          NULL);
    } else {
      g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
          NULL);
    }



    gst_pad_set_active (self->vfsrc, TRUE);
    gst_pad_set_active (self->imgsrc, TRUE);    /* XXX ??? */
    gst_pad_set_active (self->vidsrc, TRUE);    /* XXX ??? */
  }

  /* Do this even if pipeline is constructed */

  if (self->video_filter) {
    /* check if we need to replace the current one */
    if (self->video_filter != self->app_vid_filter) {
      gst_bin_remove (cbin, self->video_filter);
      gst_object_unref (self->video_filter);
      self->video_filter = NULL;
      filter_csp = gst_bin_get_by_name (cbin, "filter-colorspace");
      gst_bin_remove (cbin, filter_csp);
      gst_object_unref (filter_csp);
      filter_csp = NULL;
    }
  }

  if (!self->video_filter) {
    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);
      filter_csp = gst_element_factory_make ("ffmpegcolorspace",
          "filter-colorspace");
      gst_bin_add_many (cbin, self->video_filter, filter_csp, NULL);
      src_csp = gst_bin_get_by_name (cbin, "src-colorspace");
      capsfilter = gst_bin_get_by_name (cbin, "src-capsfilter");
      if (gst_pad_is_linked (gst_element_get_static_pad (src_csp, "src")))
        gst_element_unlink (src_csp, capsfilter);
      if (!gst_element_link_many (src_csp, self->video_filter, filter_csp,
              capsfilter, NULL))
        goto done;
    }
  }
  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}