int main (int argc, char *argv[])
{

  GMainLoop *loop;
  GstPad *videopad;
  GstBus *bus;
  guint bus_watch_id;

  file_variations[0]  = MIN_QUALITY;
  file_variations[1]  = SD_QUALITY;
  file_variations[2]  = HD_QUALITY;
  
  scale_variations[0] = SCALE_SD;
  scale_variations[1] = SCALE_HD;
  scale_variations[2] = SCALE_SD;
  scale_variations[3] = SCALE_HD;
  scale_variations[4] = SCALE_MIN;
  scale_variations[5] = SCALE_HD;
  scale_variations[6] = SCALE_SD;
  scale_variations[7] = SCALE_MIN;

  initial_latency     = FALSE;
  current_file        = 1;
  num_changes         = 0;
  runs                = MEASUREMENT_RUNS;
  int opt_index       = 0, c = 0;
  char *scale_str     = DEFAULT_SCALE;
  char *decoder_name  = DEFAULT_DECODER;
  char *sink_name     = DEFAULT_SINK;
  char *filename      = DEFAULT_FILE;
  int display         = WANT_DISPLAY;
  int decoder_option  = WANT_FILE;
  int window_size     = DEFAULT_WINDOW_SIZE;
  int frequency       = DEFAULT_FREQUENCY;
  float ret_factor    = DEFAULT_RET_FACTOR, 
        ext_factor    = DEFAULT_EXT_FACTOR;
  gboolean measure    = DEFAULT_MEASUREMENT;
  gboolean sync       = DEFAULT_SYNC;
  gboolean dynamic    = DEFAULT_DYNAMIC;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse options. */
  while(c != -1)
  {
    static struct option long_options[] =
    {
      {"file",      required_argument, 0, 'f'},
      {"uri",       required_argument, 0, 'u'},
      {"retarget",  required_argument, 0, 'r'},
      {"extend",    required_argument, 0, 'e'},
      {"size",      required_argument, 0, 's'},
      {"scale",     required_argument, 0, 'c'},
      {"frequency", required_argument, 0, 'd'},
      {"runs",      required_argument, 0, 'n'},
      {"measure",   no_argument,       0, 'm'},
      {"no-sync",   no_argument,       0, 'p'},
      {"initial",   no_argument,       0, 'i'},
      {"dynamic",   no_argument,       0, 't'}
    };

    c = getopt_long(argc, argv, "f:u:r:e:s:c:d:n:mpit", long_options, &opt_index);

    if(c == -1)
      break;

    switch(c)
    {
      case 'f':
        filename = optarg;
        break;
      case 'u':
        filename = optarg;
        decoder_name = "uridecodebin";
        decoder_option = WANT_URI;
        break;
      case 'r':
        ret_factor = strtof(optarg, NULL);
        break;
      case 'e':
        ext_factor = strtof(optarg, NULL);
        break;
      case 's':
        window_size = atoi(optarg);
        break;
      case 'c':
        scale = gst_element_factory_make ("videoscale", "scale");
        capfilt = gst_element_factory_make ("capsfilter","capsfilt");
        if(!scale || !capfilt)
        {
          g_printerr ("One element could not be created. Exiting.\n");
          return -1;
        }
        scale_str = optarg;
        GstCaps *scale_caps =  gst_caps_from_string(scale_str);
        g_assert(scale_caps);
        g_object_set (G_OBJECT (capfilt), "caps", scale_caps, NULL);
        gst_caps_unref(scale_caps);
        break;
      case 'm':
        measure = TRUE;
        break;
      case 'd':
        frequency = atoi(optarg);
        break;
      case 'n':
        runs = atoi(optarg) - 1;
        break;
      case 'p':
        sync = FALSE;
        break;
      case 't':
        dynamic = TRUE;
        break;
      case 'i':
        current_file = 0;
        initial_latency = TRUE;
        break;
      default:
        break;
    }
  }

  /* Create gstreamer elements. */
  pipeline  = gst_pipeline_new ("pipeline");
  que       = gst_element_factory_make ("queue",     "que");
  que2      = gst_element_factory_make ("queue",    "que2");
  seam      = gst_element_factory_make ("seamcrop",  "seamcrop");
  decoder   = gst_element_factory_make (decoder_name, "decoder");
  sink      = gst_element_factory_make (sink_name,   "video-output");

  if (!pipeline || !que || !que2 || !seam || !decoder || !sink) 
  {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set seamcrop element properties. */
  g_object_set (G_OBJECT (seam), "retargeting-factor", ret_factor, NULL);
  g_object_set (G_OBJECT (seam), "extend-border", ext_factor, NULL);
  g_object_set (G_OBJECT (seam), "frame-window-size", window_size, NULL);
  g_object_set (G_OBJECT (seam), "measurement", measure, NULL);

  /* Disregard stream synchronization if requested. */
  if(sync == FALSE) 
    g_object_set (G_OBJECT (sink), "sync", sync, NULL); 

  /* Options for the decoder. Sets up pipeline and links accordingly. */
  switch (decoder_option)
  {
    case WANT_FILE:
      source = gst_element_factory_make ("filesrc", "file-source");
      if(!source)
      { 
        g_printerr("Filesrc could not be created. Exiting"); 
        return -1;
      }
      g_object_set (G_OBJECT (source), "location", filename, NULL);

      gst_bin_add_many(GST_BIN(pipeline), source, decoder, NULL);
      gst_element_link (source, decoder);
      break;
    case WANT_URI:
      g_object_set (G_OBJECT (decoder), "uri", filename, NULL); 
      gst_bin_add(GST_BIN(pipeline), decoder);
      break;
    default:
      break;
  }

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* set up pad callback connector. */
  g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), NULL);

  /* set up videobin. */
  video = gst_bin_new("videobin");

  if(scale) {
    videopad = gst_element_get_static_pad(scale,"sink");
    gst_bin_add_many(GST_BIN(video), scale, capfilt, que, seam, que2, sink, NULL);

    // Link elements.
    gst_element_link(scale,capfilt);
    gst_element_link(capfilt,que);
    gst_element_link(que,seam);
    gst_element_link(seam,que2);
    gst_element_link(que2,sink);

  } else {
    videopad = gst_element_get_static_pad(que,"sink");
    gst_bin_add_many(GST_BIN(video), que, seam, que2, sink, NULL);

    // Link elements.
    gst_element_link(que,seam);
    gst_element_link(seam,que2);
    gst_element_link(que2,sink);
  }

  // Add a ghost pad to the video bin so that it can be used as an element.
  gst_element_add_pad(video,
      gst_ghost_pad_new("sink",videopad));

  gst_object_unref(videopad);

  // Add 'video' as element in 'pipeline'.
  gst_bin_add(GST_BIN(pipeline),video);

  /* Set the pipeline to "playing" state*/
  g_print ("Now playing.\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Timeout to change input resolution */
  if(dynamic)
    g_timeout_add_seconds (frequency, change_input, loop);
  else if(decoder_option == WANT_URI) 
    g_timeout_add_seconds (frequency, force_resolution, loop);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
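/*
 * A minimal sketch (an assumption; the callback body is not shown here)
 * of the on_pad_added handler connected in main(): it links each new
 * decoder pad to the "sink" ghost pad of the global 'video' bin and
 * ignores pads once the bin is already linked (e.g. an audio pad from
 * uridecodebin).
 */
static void
on_pad_added (GstElement * element, GstPad * pad, gpointer data)
{
  GstPad *sinkpad = gst_element_get_static_pad (video, "sink");

  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}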
static gboolean
setup_recoder_pipeline (GstSmartEncoder * smart_encoder)
{
  GstPad *tmppad;
  GstCaps *caps;

  /* Fast path */
  if (G_UNLIKELY (smart_encoder->encoder))
    return TRUE;

  GST_DEBUG ("Creating internal decoder and encoder");

  /* Create decoder/encoder */
  caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->decoder = get_decoder (caps);
  if (G_UNLIKELY (smart_encoder->decoder == NULL))
    goto no_decoder;
  gst_caps_unref (caps);
  gst_element_set_bus (smart_encoder->decoder, GST_ELEMENT_BUS (smart_encoder));

  caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->encoder = get_encoder (caps);
  if (G_UNLIKELY (smart_encoder->encoder == NULL))
    goto no_encoder;
  gst_caps_unref (caps);
  gst_element_set_bus (smart_encoder->encoder, GST_ELEMENT_BUS (smart_encoder));

  GST_DEBUG ("Creating internal pads");

  /* Create internal pads */

  /* Source pad which we'll use to feed data to decoders */
  smart_encoder->internal_srcpad = gst_pad_new ("internal_src", GST_PAD_SRC);
  g_object_set_qdata ((GObject *) smart_encoder->internal_srcpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_active (smart_encoder->internal_srcpad, TRUE);

  /* Sink pad which will get the buffers from the encoder.
   * Note: We don't need an event function since we'll be discarding all
   * of them. */
  smart_encoder->internal_sinkpad = gst_pad_new ("internal_sink", GST_PAD_SINK);
  g_object_set_qdata ((GObject *) smart_encoder->internal_sinkpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_chain_function (smart_encoder->internal_sinkpad, internal_chain);
  gst_pad_set_active (smart_encoder->internal_sinkpad, TRUE);

  GST_DEBUG ("Linking pads to elements");

  /* Link everything */
  tmppad = gst_element_get_static_pad (smart_encoder->encoder, "src");
  if (GST_PAD_LINK_FAILED (gst_pad_link (tmppad,
              smart_encoder->internal_sinkpad)))
    goto sinkpad_link_fail;
  gst_object_unref (tmppad);

  if (!gst_element_link (smart_encoder->decoder, smart_encoder->encoder))
    goto encoder_decoder_link_fail;

  tmppad = gst_element_get_static_pad (smart_encoder->decoder, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (smart_encoder->internal_srcpad,
              tmppad)))
    goto srcpad_link_fail;
  gst_object_unref (tmppad);

  GST_DEBUG ("Done creating internal elements/pads");

  return TRUE;

no_decoder:
  {
    GST_WARNING ("Couldn't find a decoder for %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
    return FALSE;
  }

no_encoder:
  {
    GST_WARNING ("Couldn't find an encoder for %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
    return FALSE;
  }

srcpad_link_fail:
  {
    gst_object_unref (tmppad);
    GST_WARNING ("Couldn't link internal srcpad to decoder");
    return FALSE;
  }

sinkpad_link_fail:
  {
    gst_object_unref (tmppad);
    GST_WARNING ("Couldn't link encoder to internal sinkpad");
    return FALSE;
  }

encoder_decoder_link_fail:
  {
    GST_WARNING ("Couldn't link decoder to encoder");
    return FALSE;
  }
}
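/*
 * A plausible minimal implementation (assumed, not shown above) of the
 * internal_chain function installed on internal_sinkpad: it recovers the
 * GstSmartEncoder stored as qdata on the pad and pushes each re-encoded
 * buffer out on the element's public source pad (assuming a 'srcpad'
 * member on GstSmartEncoder).
 */
static GstFlowReturn
internal_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstSmartEncoder *smart_encoder =
      g_object_get_qdata ((GObject *) pad, INTERNAL_ELEMENT);

  return gst_pad_push (smart_encoder->srcpad, buf);
}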
static void
pad_added_cb (GstElement * timeline, GstPad * pad, GESPipeline * self)
{
  OutputChain *chain;
  GESTrack *track;
  GstPad *sinkpad;
  GstCaps *caps;
  gboolean reconfigured = FALSE;

  caps = gst_pad_query_caps (pad, NULL);

  GST_DEBUG_OBJECT (self, "new pad %s:%s , caps:%" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), caps);

  gst_caps_unref (caps);

  track = ges_timeline_get_track_for_pad (self->priv->timeline, pad);

  if (G_UNLIKELY (!track)) {
    GST_WARNING_OBJECT (self, "Couldn't find coresponding track !");
    return;
  }

  /* Don't connect track if it's not going to be used */
  if (track->type == GES_TRACK_TYPE_VIDEO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_VIDEO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Video track... but we don't need it. Not linking");
  }
  if (track->type == GES_TRACK_TYPE_AUDIO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_AUDIO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Audio track... but we don't need it. Not linking");
  }

  /* Get an existing chain or create it */
  if (!(chain = get_output_chain_for_track (self, track)))
    chain = new_output_chain_for_track (self, track);
  chain->srcpad = pad;

  /* Adding tee */
  chain->tee = gst_element_factory_make ("tee", NULL);
  gst_bin_add (GST_BIN_CAST (self), chain->tee);
  gst_element_sync_state_with_parent (chain->tee);

  /* Linking pad to tee */
  sinkpad = gst_element_get_static_pad (chain->tee, "sink");
  gst_pad_link_full (pad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
  gst_object_unref (sinkpad);

  /* Connect playsink */
  if (self->priv->mode & TIMELINE_MODE_PREVIEW) {
    const gchar *sinkpad_name;
    GstPad *tmppad;

    GST_DEBUG_OBJECT (self, "Connecting to playsink");

    switch (track->type) {
      case GES_TRACK_TYPE_VIDEO:
        sinkpad_name = "video_sink";
        break;
      case GES_TRACK_TYPE_AUDIO:
        sinkpad_name = "audio_sink";
        break;
      case GES_TRACK_TYPE_TEXT:
        sinkpad_name = "text_sink";
        break;
      default:
        GST_WARNING_OBJECT (self, "Can't handle tracks of type %d yet",
            track->type);
        goto error;
    }

    /* Request a sinkpad from playsink */
    if (G_UNLIKELY (!(sinkpad =
                gst_element_get_request_pad (self->priv->playsink,
                    sinkpad_name)))) {
      GST_ERROR_OBJECT (self, "Couldn't get a pad from the playsink !");
      goto error;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src_%u");
    if (G_UNLIKELY (gst_pad_link_full (tmppad, sinkpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_ERROR_OBJECT (self, "Couldn't link track pad to playsink");
      gst_object_unref (tmppad);
      goto error;
    }
    chain->blocked_pad = tmppad;
    GST_DEBUG_OBJECT (tmppad, "blocking pad");
    chain->probe_id = gst_pad_add_probe (tmppad,
        GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked, NULL, NULL);

    GST_DEBUG ("Reconfiguring playsink");

    /* reconfigure playsink */
    g_signal_emit_by_name (self->priv->playsink, "reconfigure", &reconfigured);
    GST_DEBUG ("'reconfigure' returned %d", reconfigured);

    /* We still hold a reference on the sinkpad */
    chain->playsinkpad = sinkpad;
  }

  /* Connect to encodebin */
  if (self->priv->mode & (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER)) {
    GstPad *tmppad;
    GST_DEBUG_OBJECT (self, "Connecting to encodebin");

    if (!chain->encodebinpad) {
      /* Check for unused static pads */
      sinkpad = get_compatible_unlinked_pad (self->priv->encodebin, pad);

      if (sinkpad == NULL) {
        GstCaps *caps = gst_pad_query_caps (pad, NULL);

        /* If no compatible static pad is available, request a pad */
        g_signal_emit_by_name (self->priv->encodebin, "request-pad", caps,
            &sinkpad);
        gst_caps_unref (caps);

        if (G_UNLIKELY (sinkpad == NULL)) {
          GST_ERROR_OBJECT (self, "Couldn't get a pad from encodebin !");
          goto error;
        }
      }
      chain->encodebinpad = sinkpad;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src_%u");
    if (G_UNLIKELY (gst_pad_link_full (tmppad,
                chain->encodebinpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_WARNING_OBJECT (self, "Couldn't link track pad to playsink");
      goto error;
    }
    gst_object_unref (tmppad);

  }

  /* If chain wasn't already present, insert it in list */
  if (!get_output_chain_for_track (self, track))
    self->priv->chains = g_list_append (self->priv->chains, chain);

  GST_DEBUG ("done");
  return;

error:
  {
    if (chain->tee) {
      gst_bin_remove (GST_BIN_CAST (self), chain->tee);
    }
    if (sinkpad)
      gst_object_unref (sinkpad);
    g_free (chain);
  }
}
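/*
 * A sketch (assumed) of the pad_blocked probe used above: a
 * GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM probe keeps the pad blocked for as
 * long as it stays installed, so the callback itself has nothing to do.
 */
static GstPadProbeReturn
pad_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer unused)
{
  /* The pad stays blocked until gst_pad_remove_probe() is called with
   * chain->probe_id elsewhere. */
  return GST_PAD_PROBE_OK;
}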
static gboolean
kms_base_mixer_link_sink_pad (KmsBaseMixer * mixer, gint id,
    const gchar * gp_name, const gchar * gp_template_name,
    GstElement * internal_element, const gchar * pad_name,
    const gchar * port_src_pad_name, gulong target_offset)
{
  KmsBaseMixerPortData *port_data;
  gboolean ret;
  GstPad *gp, *target;
  GstPad **port_data_target;

  if (GST_OBJECT_PARENT (internal_element) != GST_OBJECT (mixer)) {
    GST_ERROR_OBJECT (mixer, "Cannot link %" GST_PTR_FORMAT " wrong hierarchy",
        internal_element);
    return FALSE;
  }

  target = gst_element_get_static_pad (internal_element, pad_name);
  if (target == NULL) {
    target = gst_element_get_request_pad (internal_element, pad_name);
  }

  if (target == NULL) {
    GST_ERROR_OBJECT (mixer, "Cannot get target pad");
    return FALSE;
  }

  KMS_BASE_MIXER_LOCK (mixer);

  port_data = g_hash_table_lookup (mixer->priv->ports, &id);

  if (port_data == NULL) {
    ret = FALSE;
    goto end;
  }

  port_data_target = G_STRUCT_MEMBER_P (port_data, target_offset);
  *port_data_target = g_object_ref (target);

  gp = gst_element_get_static_pad (GST_ELEMENT (mixer), gp_name);
  if (gp != NULL) {
    ret = gst_ghost_pad_set_target (GST_GHOST_PAD (gp), target);
    g_object_unref (gp);
  } else {
    GstPad *src_pad = gst_element_get_static_pad (port_data->port,
        port_src_pad_name);

    if (src_pad != NULL) {
      ret = kms_base_mixer_create_and_link_ghost_pad (mixer, src_pad,
          gp_name, gp_template_name, target);
      g_object_unref (src_pad);
    } else {
      ret = TRUE;
    }
  }

  GST_DEBUG_OBJECT (mixer, "Audio target pad for port %d: %" GST_PTR_FORMAT,
      port_data->id, port_data->audio_sink_target);
  GST_DEBUG_OBJECT (mixer, "Video target pad for port %d: %" GST_PTR_FORMAT,
      port_data->id, port_data->video_sink_target);

end:

  KMS_BASE_MIXER_UNLOCK (mixer);

  g_object_unref (target);

  return ret;
}
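/*
 * Hypothetical usage of the helper above (pad and template names are
 * illustrative; only the 'video_sink_target' member is taken from the
 * debug output in the function): the final argument is a member offset,
 * resolved at runtime with G_STRUCT_MEMBER_P(), so one helper can fill
 * either the audio or the video target pad of KmsBaseMixerPortData.
 */
static gboolean
kms_base_mixer_link_video_sink_example (KmsBaseMixer * mixer, gint id,
    GstElement * internal_element, const gchar * pad_name)
{
  return kms_base_mixer_link_sink_pad (mixer, id, "video_sink",
      "video_sink_%u", internal_element, pad_name, "video_src",
      G_STRUCT_OFFSET (KmsBaseMixerPortData, video_sink_target));
}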
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *                                                             +------------+
 *                                                         /---+ inter*sink |
 * +--------+    +--------+   +------------+   +-----+    /    +------------+
 * | source +----+ scale? +---+ capsfilter +---+ tee +---/
 * +--------+    +--------+   +------------+   +-----+   \
 *                                                        \    +------------+
 *                                                         \---+ inter*sink |
 *                                                             +------------+
 *
 * For each newly requested pad a new inter*sink is added to the tee.
 * Note that this is a completely independent pipeline, and the complete
 * pipeline is only created once for a specific media source.
 *
 * Then for each newly requested pad another bin with a inter*src is
 * created, which is then going to be part of the transport agent
 * pipeline. The ghostpad of it is what we return here.
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_element = NULL;
    GstElement *source_pipeline;
    GHashTable *event_data;
    GValue *value;
#if defined(__linux__) && !defined(__ANDROID__)
    gchar *tmp;
#endif

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    /* only create the source bin for this media source once */
    if ((source_pipeline = _owr_media_source_get_source_bin(media_source)))
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
    else {
        OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
        OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
        GstElement *source, *source_process = NULL, *capsfilter = NULL, *tee;
        GstPad *sinkpad, *source_pad;
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstBus *bus;
        GSource *bus_source;

        event_data = _owr_value_table_new();
        value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());

        g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_pipeline = gst_pipeline_new(bin_name);
        gst_pipeline_use_clock(GST_PIPELINE(source_pipeline), gst_system_clock_obtain());
        gst_element_set_base_time(source_pipeline, _owr_get_base_time());
        gst_element_set_start_time(source_pipeline, GST_CLOCK_TIME_NONE);
        g_free(bin_name);
        bin_name = NULL;

#ifdef OWR_DEBUG
        g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

        bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline));
        bus_source = gst_bus_create_watch(bus);
        g_source_set_callback(bus_source, (GSourceFunc) bus_call, media_source, NULL);
        g_source_attach(bus_source, _owr_get_main_context());
        g_source_unref(bus_source);

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
            {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
/*
    Default values for buffer-time and latency-time on android are 200ms and 20ms.
    The minimum latency-time that can be used on Android is 20ms, and using
    a 40ms buffer-time with a 20ms latency-time causes crackling audio.
    So let's just stick with the defaults.
*/
#if !defined(__ANDROID__)
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#endif
                if (priv->device_index > -1) {
#ifdef __APPLE__
                    g_object_set(source, "device", priv->device_index, NULL);
#elif defined(__linux__) && !defined(__ANDROID__)
                    tmp = g_strdup_printf("%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            break;
            }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            GstPad *srcpad;
            GstCaps *device_caps;

            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                if (priv->device_index > -1) {
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                    g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                    g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                    tmp = g_strdup_printf("/dev/video%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
                break;
            case OWR_SOURCE_TYPE_TEST: {
                GstElement *src, *time;
                GstPad *srcpad;

                source = gst_bin_new("video-source");

                CREATE_ELEMENT(src, "videotestsrc", "videotestsrc");
                g_object_set(src, "is-live", TRUE, NULL);
                gst_bin_add(GST_BIN(source), src);

                time = gst_element_factory_make("timeoverlay", "timeoverlay");
                if (time) {
                    g_object_set(time, "font-desc", "Sans 60", NULL);
                    gst_bin_add(GST_BIN(source), time);
                    gst_element_link(src, time);
                    srcpad = gst_element_get_static_pad(time, "src");
                } else
                    srcpad = gst_element_get_static_pad(src, "src");

                gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad));
                gst_object_unref(srcpad);

                break;
            }
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* First try to see if we can just get the format we want directly */

            source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
            gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#else
            _owr_gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#endif
            /* Now see what the device can really produce */
            srcpad = gst_element_get_static_pad(source, "src");
            gst_element_set_state(source, GST_STATE_READY);
            device_caps = gst_pad_query_caps(srcpad, source_caps);

            if (gst_caps_is_empty(device_caps)) {
                /* Let's see if it works when we drop format constraints (which can be dealt with downstream) */
                GstCaps *tmp = source_caps;
                source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
                gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#else
                _owr_gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#endif
                gst_caps_unref(tmp);

                gst_caps_unref(device_caps);
                device_caps = gst_pad_query_caps(srcpad, source_caps);

                if (gst_caps_is_empty(device_caps)) {
                    /* Accepting any format didn't work, we're going to hope that scaling fixes it */
                    CREATE_ELEMENT(source_process, "videoscale", "video-source-scale");
                    gst_bin_add(GST_BIN(source_pipeline), source_process);
                }
            }

            gst_caps_unref(device_caps);
            gst_object_unref(srcpad);

#if defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
            /* Force NV12 on iOS else the source can negotiate BGRA
             * ercolorspace can do NV12 -> BGRA and NV12 -> I420 which is what
             * is needed for Bowser */
            gst_caps_set_simple(source_caps, "format", G_TYPE_STRING, "NV12", NULL);
#endif

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_pipeline), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        source_pad = gst_element_get_static_pad(source, "src");
        g_signal_connect(source_pad, "notify::caps", G_CALLBACK(on_caps), media_source);
        gst_object_unref(source_pad);

        CREATE_ELEMENT(tee, "tee", "source-tee");
        g_object_set(tee, "allow-not-linked", TRUE, NULL);

        gst_bin_add_many(GST_BIN(source_pipeline), source, tee, NULL);

        /* Many sources don't like reconfiguration and it's pointless
         * here anyway right now. No need to reconfigure whenever something
         * is added to the tee or removed.
         * We will have to implement reconfiguration differently later by
         * selecting the best caps based on all consumers.
         */
        sinkpad = gst_element_get_static_pad(tee, "sink");
        gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, drop_reconfigure_event, NULL, NULL);
        gst_object_unref(sinkpad);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            if (source_process) {
                LINK_ELEMENTS(source_process, capsfilter);
                LINK_ELEMENTS(source, source_process);
            } else
                LINK_ELEMENTS(source, capsfilter);
        } else if (source_process) {
            LINK_ELEMENTS(source_process, tee);
            LINK_ELEMENTS(source, source_process);
        } else
            LINK_ELEMENTS(source, tee);

        gst_element_sync_state_with_parent(tee);
        if (capsfilter)
            gst_element_sync_state_with_parent(capsfilter);
        if (source_process)
            gst_element_sync_state_with_parent(source_process);
        gst_element_sync_state_with_parent(source);

        _owr_media_source_set_source_bin(media_source, source_pipeline);
        _owr_media_source_set_source_tee(media_source, tee);
        if (gst_element_set_state(source_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
            GST_ERROR("Failed to set local source pipeline %s to playing", GST_OBJECT_NAME(source_pipeline));
            /* FIXME: We should handle this and don't expose the source */
        }

        value = _owr_value_table_add(event_data, "end_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());
        OWR_POST_EVENT(media_source, LOCAL_SOURCE_STARTED, event_data);

        g_signal_connect(tee, "pad-removed", G_CALLBACK(tee_pad_removed_cb), media_source);
    }
    gst_object_unref(source_pipeline);

    source_element = OWR_MEDIA_SOURCE_CLASS(owr_local_media_source_parent_class)->request_source(media_source, caps);

done:
    return source_element;
}
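/*
 * A simplified sketch of the consumer-side bin described in the comment
 * before owr_local_media_source_request_source(): an inter*src element
 * pulls the media back out of the source pipeline, and the bin's ghost
 * pad is what ends up in the transport agent pipeline. The element and
 * channel names here are illustrative assumptions.
 */
static GstElement *create_consumer_bin_sketch(const gchar *channel)
{
    GstElement *bin, *src, *convert;
    GstPad *srcpad;

    bin = gst_bin_new("inter-src-bin");
    src = gst_element_factory_make("intervideosrc", NULL);
    g_object_set(src, "channel", channel, NULL);
    convert = gst_element_factory_make("videoconvert", NULL);

    gst_bin_add_many(GST_BIN(bin), src, convert, NULL);
    gst_element_link(src, convert);

    srcpad = gst_element_get_static_pad(convert, "src");
    gst_element_add_pad(bin, gst_ghost_pad_new("src", srcpad));
    gst_object_unref(srcpad);

    return bin;
}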
/**
 * Set up the Gstreamer pipeline. The playbin element is used to decode
 * all kinds of different formats. The capsfilter is used to deliver the
 * audio in a fixed format (X Hz, 1-2 channels, 16 bit signed)
 *
 * The pipeline looks like this:
 *
 * <pre>
 *  .--------------.    .------------------------------------------.
 *  |    playbin   |    |mybin    .------------.   .------------.  |
 *  |----.    .----|    |-----.   | capsfilter |   |  fakesink  |  |
 *  |sink|    |src |--->|ghost|   |----.   .---|   |----.   .---|  |    handoff
 *  |----'    '----|    |pad  |-->|sink|   |src|-->|sink|   |src|--+--> handler
 *  |              |    |-----'   '------------'   '------------'  |
 *  '--------------'    '------------------------------------------'
 * </pre>
 *
 * @param st Audio source state
 *
 * @return 0 if success, otherwise errorcode
 */
static int gst_setup(struct ausrc_st *st)
{
	GstBus *bus;
	GstPad *pad;

	st->loop = g_main_loop_new(NULL, FALSE);

	st->pipeline = gst_pipeline_new("pipeline");
	if (!st->pipeline) {
		warning("gst: failed to create pipeline element\n");
		return ENOMEM;
	}

	/********************* Player BIN **************************/

	st->source = gst_element_factory_make("playbin", "source");
	if (!st->source) {
		warning("gst: failed to create playbin source element\n");
		return ENOMEM;
	}

	/********************* My BIN **************************/

	st->bin = gst_bin_new("mybin");

	st->capsfilt = gst_element_factory_make("capsfilter", NULL);
	if (!st->capsfilt) {
		warning("gst: failed to create capsfilter element\n");
		return ENOMEM;
	}

	set_caps(st);

	st->sink = gst_element_factory_make("fakesink", "sink");
	if (!st->sink) {
		warning("gst: failed to create sink element\n");
		return ENOMEM;
	}

	gst_bin_add_many(GST_BIN(st->bin), st->capsfilt, st->sink, NULL);
	gst_element_link_many(st->capsfilt, st->sink, NULL);

	/* add ghostpad */
	pad = gst_element_get_static_pad(st->capsfilt, "sink");
	gst_element_add_pad(st->bin, gst_ghost_pad_new("sink", pad));
	gst_object_unref(GST_OBJECT(pad));

	/* put all elements in a bin */
	gst_bin_add_many(GST_BIN(st->pipeline), st->source, NULL);

	/* Override audio-sink handoff handler */
	g_object_set(G_OBJECT(st->sink), "signal-handoffs", TRUE, NULL);
	g_signal_connect(st->sink, "handoff", G_CALLBACK(handoff_handler), st);

	g_object_set(G_OBJECT(st->source), "audio-sink", st->bin, NULL);

	/********************* Misc **************************/

	/* Bus watch */
	bus = gst_pipeline_get_bus(GST_PIPELINE(st->pipeline));
	gst_bus_add_watch(bus, bus_watch_handler, st);
	gst_object_unref(bus);

	/* Set URI */
	g_object_set(G_OBJECT(st->source), "uri", st->uri, NULL);

	return 0;
}
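/*
 * A minimal sketch (assumed; the real handler is not shown here) of the
 * fakesink handoff handler wired up in gst_setup(): it maps each buffer
 * read-only and would hand the fixed-format 16-bit signed samples to the
 * application from here.
 */
static void handoff_handler(GstElement *sink, GstBuffer *buffer,
			    GstPad *pad, gpointer user_data)
{
	struct ausrc_st *st = user_data;
	GstMapInfo info;
	(void)sink;
	(void)pad;
	(void)st;

	if (!gst_buffer_map(buffer, &info, GST_MAP_READ))
		return;

	/* info.data/info.size hold the raw 16-bit samples; deliver them
	 * to the application here (the delivery API is project-specific) */

	gst_buffer_unmap(buffer, &info);
}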
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters are not
    // specified, let's set them manually to prevent very huge images
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v42lsrc = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v42lsrc = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v42lsrc == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link elements
    if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v42lsrc);

        return false;
    }
    gst_caps_unref(caps_v42lsrc);

    // link elements
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    int width = 0, height = 0;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<float>(num) / denom;
    end = false;

    return true;
}
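/*
 * A minimal sketch (not part of the original class, and assuming the
 * GStreamer 1.x branch of the #if blocks above) of pulling one frame
 * from the configured appsink: signal emission is disabled, so the
 * blocking pull API is used and the buffer is mapped read-only.
 */
static gboolean pullOneFrame(GstAppSink *appsink)
{
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if (sample == NULL)
        return FALSE; /* EOS or pipeline stopped */

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_READ))
    {
        /* map.data / map.size hold one RGB or GRAY8 frame */
        gst_buffer_unmap(buffer, &map);
    }

    gst_sample_unref(sample);
    return TRUE;
}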
static void need_data_callback (GstAppSrc *src, guint length, gpointer user_data)
{
    EncoderStream *stream = (EncoderStream *)user_data;
    gint current_position;
    GstBuffer *buffer;
    GstPad *pad;
    GstEvent *event;
    Encoder *encoder;

    current_position = (stream->current_position + 1) % SOURCE_RING_SIZE;
    for (;;) {
        if (stream->state != NULL) {
            stream->state->last_heartbeat = gst_clock_get_time (stream->system_clock);
        }
        /* ensure next buffer isn't the current buffer */
        if ((current_position == stream->source->current_position) || stream->source->current_position == -1) {
            if ((current_position == stream->source->current_position) && stream->source->eos) {
                GstFlowReturn ret;

                ret = gst_app_src_end_of_stream (src);
                GST_INFO ("EOS of source %s, tell encoder %s, return %s", stream->source->name, stream->name, gst_flow_get_name (ret));
                break;
            }
            GST_DEBUG ("waiting %s source ready", stream->name);
            g_usleep (50000); /* waiting 50ms */
            continue;
        }

        /* first buffer, set caps. */
        if (stream->current_position == -1) {
            GstCaps *caps;
            gchar *caps_string;

            caps = gst_sample_get_caps (stream->source->ring[current_position]);
            gst_app_src_set_caps (src, caps);
            caps_string = gst_caps_to_string (caps);
            GST_INFO ("set stream %s caps: %s", stream->name, caps_string);
            g_free (caps_string);
        }

        buffer = gst_sample_get_buffer (stream->source->ring[current_position]);
        GST_DEBUG ("%s encoder position %d; timestamp %" GST_TIME_FORMAT " source position %d",
                stream->name,   
                stream->current_position,
                GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
                stream->source->current_position);

        encoder = stream->encoder;
        /* segment_duration != 0? with m3u8playlist conf */
        if ((encoder->segment_duration != 0) && !encoder->has_tssegment) {
            if (encoder->duration_accumulation >= encoder->segment_duration) {
                GstClockTime running_time;

                encoder->last_segment_duration = encoder->duration_accumulation;
                GST_ERROR ("last segment duration: %lu", encoder->last_segment_duration);
                running_time = GST_BUFFER_PTS (buffer);
                /* force key unit? */
                if (encoder->has_video) {
                    pad = gst_element_get_static_pad ((GstElement *)src, "src");
                    event = gst_video_event_new_downstream_force_key_unit (running_time,
                            running_time,
                            running_time,
                            TRUE,
                            encoder->force_key_count);
                    gst_pad_push_event (pad, event);

                } else {
                    encoder->last_running_time = running_time;
                }
                encoder->force_key_count++;
                encoder->duration_accumulation = 0;
            }
            encoder->duration_accumulation += GST_BUFFER_DURATION (buffer);
        }

        /* push buffer */
        if (gst_app_src_push_buffer (src, gst_buffer_ref (buffer)) != GST_FLOW_OK) {
            GST_ERROR ("%s, gst_app_src_push_buffer failure.", stream->name);
        }

        if (stream->state != NULL) {
            stream->state->current_timestamp = GST_BUFFER_PTS (buffer);
        }

        break;
    }
    stream->current_position = current_position;
}
static gint create_encoder_pipeline (Encoder *encoder)
{
    GstElement *pipeline, *element;
    Bin *bin;
    Link *link;
    GSList *bins, *links, *elements;
    GstElementFactory *element_factory;
    GType type;
    EncoderStream *stream;
    GstAppSrcCallbacks callbacks = {
        need_data_callback,
        NULL,
        NULL
    };
    GstAppSinkCallbacks encoder_appsink_callbacks = {
        NULL,
        NULL,
        new_sample_callback
    };
    GstCaps *caps;
    GstBus *bus;

    pipeline = gst_pipeline_new (NULL);

    /* add element to pipeline first. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;
        elements = bin->elements;
        while (elements != NULL) {
            element = elements->data;
            if (!gst_bin_add (GST_BIN (pipeline), element)) {
                GST_ERROR ("add element %s to bin %s error.", gst_element_get_name (element), bin->name);
                return 1;
            }
            elements = g_slist_next (elements);
        }
        bins = g_slist_next (bins);
    }

    /* then links element. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;
        element = bin->first;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        stream = NULL;
        if (g_strcmp0 ("GstAppSrc", g_type_name (type)) == 0) {
            GST_INFO ("Encoder appsrc found.");
            stream = encoder_get_stream (encoder, bin->name);
            gst_app_src_set_callbacks (GST_APP_SRC (element), &callbacks, stream, NULL);
        }
        element = bin->last;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        if ((g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstHlsSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstFileSink", g_type_name (type)) == 0)) {
            GstPad *pad;

            if (g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) {
                GST_INFO ("Encoder appsink found.");
                gst_app_sink_set_callbacks (GST_APP_SINK (element), &encoder_appsink_callbacks, encoder, NULL);
            }
            pad = gst_element_get_static_pad (element, "sink");
            gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoder_appsink_event_probe, encoder, NULL);
        }
        links = bin->links;
        while (links != NULL) {
            link = links->data;
            GST_INFO ("link element: %s -> %s", link->src_name, link->sink_name);
            if (link->caps != NULL) {
                caps = gst_caps_from_string (link->caps);
                gst_element_link_filtered (link->src, link->sink, caps);
                gst_caps_unref (caps);

            } else {
                gst_element_link (link->src, link->sink);
            }
            links = g_slist_next (links);
        }
        bins = g_slist_next (bins);
    }
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_callback, encoder);
    g_object_unref (bus);
    encoder->pipeline = pipeline;

    return 0;
}
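/*
 * A plausible sketch (assumed) of the encoder_appsink_event_probe
 * installed above: it watches downstream events for the force-key-unit
 * events pushed by need_data_callback() and records the segment
 * boundary; everything else passes through untouched. Only
 * 'last_running_time' is taken from the surrounding code, the rest is
 * illustrative.
 */
static GstPadProbeReturn
encoder_appsink_event_probe (GstPad *pad, GstPadProbeInfo *info, gpointer data)
{
    Encoder *encoder = data;
    GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

    if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;

        gst_video_event_parse_downstream_force_key_unit (event, NULL, NULL,
                &running_time, NULL, NULL);
        encoder->last_running_time = running_time;
    }

    return GST_PAD_PROBE_OK;
}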
static gboolean
gst_switch_commit_new_kid (GstSwitchSink * sink)
{
  GstPad *targetpad;
  GstState kid_state;
  GstElement *new_kid, *old_kid;
  gboolean is_fakesink = FALSE;
  GstBus *bus;

  /* need locking around member accesses */
  GST_OBJECT_LOCK (sink);
  /* If we're currently changing state, set the child to the next state
   * we're transitioning too, rather than our current state which is 
   * about to change */
  if (GST_STATE_NEXT (sink) != GST_STATE_VOID_PENDING)
    kid_state = GST_STATE_NEXT (sink);
  else
    kid_state = GST_STATE (sink);

  new_kid = sink->new_kid;
  sink->new_kid = NULL;
  GST_OBJECT_UNLOCK (sink);

  /* Fakesink by default if NULL is passed as the new child */
  if (new_kid == NULL) {
    GST_DEBUG_OBJECT (sink, "Replacing kid with fakesink");
    new_kid = gst_element_factory_make ("fakesink", "testsink");
    if (new_kid == NULL) {
      GST_ERROR_OBJECT (sink, "Failed to create fakesink");
      return FALSE;
    }
    /* Add a reference, as it would if the element came from sink->new_kid */
    gst_object_ref (new_kid);
    g_object_set (new_kid, "sync", TRUE, NULL);
    is_fakesink = TRUE;
  } else {
    GST_DEBUG_OBJECT (sink, "Setting new kid");
  }

  /* set temporary bus of our own to catch error messages from the child
   * (could we just set our own bus on it, or would the state change messages
   * from the not-yet-added element confuse the state change algorithm? Let's
   * play it safe for now) */
  bus = gst_bus_new ();
  gst_element_set_bus (new_kid, bus);
  gst_object_unref (bus);

  if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
    GstMessage *msg;

    /* check if child posted an error message and if so re-post it on our bus
     * so that the application gets to see a decent error and not our generic
     * fallback error message which is completely indecipherable to the user */
    msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
    if (msg) {
      GST_INFO_OBJECT (sink, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
      gst_element_post_message (GST_ELEMENT (sink), msg);
    }
    /* FIXME: need a translated error message that tells the user to check
     * her GConf audio/video settings */
    GST_ELEMENT_ERROR (sink, CORE, STATE_CHANGE, (NULL),
        ("Failed to set state on new child."));
    gst_element_set_bus (new_kid, NULL);
    gst_object_unref (new_kid);
    return FALSE;
  }
  gst_element_set_bus (new_kid, NULL);
  gst_bin_add (GST_BIN (sink), new_kid);

  /* Now, replace the existing child */
  GST_OBJECT_LOCK (sink);
  old_kid = sink->kid;
  sink->kid = new_kid;
  /* Mark whether a custom kid or fakesink has been installed */
  sink->have_kid = !is_fakesink;
  GST_OBJECT_UNLOCK (sink);

  /* kill old element */
  if (old_kid) {
    GST_DEBUG_OBJECT (sink, "Removing old kid %" GST_PTR_FORMAT, old_kid);
    gst_element_set_state (old_kid, GST_STATE_NULL);
    gst_bin_remove (GST_BIN (sink), old_kid);
    gst_object_unref (old_kid);
  }

  /* re-attach ghostpad */
  GST_DEBUG_OBJECT (sink, "Creating new ghostpad");
  targetpad = gst_element_get_static_pad (sink->kid, "sink");
  gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
  gst_object_unref (targetpad);
  GST_DEBUG_OBJECT (sink, "done changing child of switchsink");

  /* FIXME: Push new-segment info and pre-roll buffer(s) into the kid */

  /* Unblock the target pad if necessary */
  if (sink->awaiting_block) {
    gst_pad_set_blocked (sink->pad, FALSE);
    sink->awaiting_block = FALSE;
  }

  return TRUE;
}
static void
uridecodebin_pad_added_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  PrivateStream *ps;
  GstPad *sinkpad = NULL;
  GstCaps *caps;
  static GstCaps *subs_caps = NULL;

  if (!subs_caps) {
    subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
        "subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
        "application/x-ssa; application/x-ass; subtitle/x-kate; "
        "video/x-dvd-subpicture; ");
  }

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  ps = g_slice_new0 (PrivateStream);

  ps->dc = dc;
  ps->pad = pad;
  ps->queue = gst_element_factory_make ("queue", NULL);
  ps->sink = gst_element_factory_make ("fakesink", NULL);

  if (G_UNLIKELY (ps->queue == NULL || ps->sink == NULL))
    goto error;

  g_object_set (ps->sink, "silent", TRUE, NULL);
  g_object_set (ps->queue, "max-size-buffers", 1, "silent", TRUE, NULL);

  caps = gst_pad_get_caps_reffed (pad);

  if (gst_caps_can_intersect (caps, subs_caps)) {
    /* Subtitle streams are sparse and don't provide any information - don't
     * wait for data to preroll */
    g_object_set (ps->sink, "async", FALSE, NULL);
  }

  gst_caps_unref (caps);

  gst_bin_add_many (dc->priv->pipeline, ps->queue, ps->sink, NULL);

  if (!gst_element_link_pads_full (ps->queue, "src", ps->sink, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->sink))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->queue))
    goto error;

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  if (sinkpad == NULL)
    goto error;
  if (gst_pad_link_full (pad, sinkpad,
          GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)
    goto error;
  gst_object_unref (sinkpad);

  /* Add an event probe */
  gst_pad_add_event_probe (pad, G_CALLBACK (_event_probe), ps);

  DISCO_LOCK (dc);
  dc->priv->streams = g_list_append (dc->priv->streams, ps);
  DISCO_UNLOCK (dc);

  GST_DEBUG_OBJECT (dc, "Done handling pad");

  return;

error:
  GST_ERROR_OBJECT (dc, "Error while handling pad");
  if (sinkpad)
    gst_object_unref (sinkpad);
  if (ps->queue)
    gst_object_unref (ps->queue);
  if (ps->sink)
    gst_object_unref (ps->sink);
  g_slice_free (PrivateStream, ps);
  return;
}
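/*
 * A sketch (assumed) of the _event_probe attached above with
 * gst_pad_add_event_probe() (a 0.10-style event probe): tag events are
 * picked off the pad so the discoverer can collect per-stream metadata,
 * and returning TRUE lets every event continue flowing.
 */
static gboolean
_event_probe (GstPad * pad, GstEvent * event, PrivateStream * ps)
{
  if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
    GstTagList *tl = NULL;

    gst_event_parse_tag (event, &tl);
    GST_DEBUG_OBJECT (ps->dc, "Got tags %" GST_PTR_FORMAT, tl);
    /* the real discoverer would merge these into the stream's tag list */
  }

  return TRUE;
}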
/* build a pipeline equivalent to:
 *
 * gst-launch -v rtpbin name=rtpbin \
 *    $AUDIO_SRC ! audioconvert ! audioresample ! $AUDIO_ENC ! $AUDIO_PAY ! rtpbin.send_rtp_sink_0  \
 *           rtpbin.send_rtp_src_0 ! udpsink port=5002 host=$DEST                      \
 *           rtpbin.send_rtcp_src_0 ! udpsink port=5003 host=$DEST sync=false async=false \
 *        udpsrc port=5007 ! rtpbin.recv_rtcp_sink_0
 */
int
main (int argc, char *argv[])
{
  GstElement *audiosrc, *audioconv, *audiores, *audioenc, *audiopay;
  GstElement *rtpbin, *rtpsink, *rtcpsink, *rtcpsrc;
  GstElement *pipeline;
  GMainLoop *loop;
  GstPad *srcpad, *sinkpad;

  /* always init first */
  gst_init (&argc, &argv);

  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);

  /* the audio capture and format conversion */
  audiosrc = gst_element_factory_make (AUDIO_SRC, "audiosrc");
  g_assert (audiosrc);
  audioconv = gst_element_factory_make ("audioconvert", "audioconv");
  g_assert (audioconv);
  audiores = gst_element_factory_make ("audioresample", "audiores");
  g_assert (audiores);
  /* the encoding and payloading */
  audioenc = gst_element_factory_make (AUDIO_ENC, "audioenc");
  g_assert (audioenc);
  audiopay = gst_element_factory_make (AUDIO_PAY, "audiopay");
  g_assert (audiopay);

  /* add capture and payloading to the pipeline and link */
  gst_bin_add_many (GST_BIN (pipeline), audiosrc, audioconv, audiores,
      audioenc, audiopay, NULL);

  if (!gst_element_link_many (audiosrc, audioconv, audiores, audioenc,
          audiopay, NULL)) {
    g_error ("Failed to link audiosrc, audioconv, audioresample, "
        "audio encoder and audio payloader");
  }

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
  g_assert (rtpbin);

  gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* the udp sinks and source we will use for RTP and RTCP */
  rtpsink = gst_element_factory_make ("udpsink", "rtpsink");
  g_assert (rtpsink);
  g_object_set (rtpsink, "port", 5002, "host", DEST_HOST, NULL);

  rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (rtcpsink);
  g_object_set (rtcpsink, "port", 5003, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL);

  rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc");
  g_assert (rtcpsrc);
  g_object_set (rtcpsrc, "port", 5007, NULL);

  gst_bin_add_many (GST_BIN (pipeline), rtpsink, rtcpsink, rtcpsrc, NULL);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */
  sinkpad = gst_element_get_request_pad (rtpbin, "send_rtp_sink_0");
  srcpad = gst_element_get_static_pad (audiopay, "src");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link audio payloader to rtpbin");
  gst_object_unref (srcpad);

  /* get the RTP srcpad that was created when we requested the sinkpad above and
   * link it to the rtpsink sinkpad*/
  srcpad = gst_element_get_static_pad (rtpbin, "send_rtp_src_0");
  sinkpad = gst_element_get_static_pad (rtpsink, "sink");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtpbin to rtpsink");
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* get an RTCP srcpad for sending RTCP to the receiver */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  sinkpad = gst_element_get_static_pad (rtcpsink, "sink");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtpbin to rtcpsink");
  gst_object_unref (sinkpad);

  /* we also want to receive RTCP, request an RTCP sinkpad for session 0 and
   * link it to the srcpad of the udpsrc for RTCP */
  srcpad = gst_element_get_static_pad (rtcpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtcpsrc to rtpbin");
  gst_object_unref (srcpad);

  /* set the pipeline to playing */
  g_print ("starting sender pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* print stats every second */
  g_timeout_add_seconds (1, (GSourceFunc) print_stats, rtpbin);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping sender pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  return 0;
}
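/*
 * A plausible sketch (assumed) of the print_stats callback scheduled
 * with g_timeout_add_seconds() above: it asks rtpbin for the internal
 * session object of session 0 and dumps its "stats" structure (assuming
 * a GStreamer version whose RTPSession exposes that property).
 */
static gboolean
print_stats (GstElement * rtpbin)
{
  GObject *session;
  GstStructure *stats;
  gchar *str;

  g_signal_emit_by_name (rtpbin, "get-internal-session", 0, &session);
  if (session == NULL)
    return TRUE;

  g_object_get (session, "stats", &stats, NULL);
  str = gst_structure_to_string (stats);
  g_print ("session stats: %s\n", str);
  g_free (str);
  gst_structure_free (stats);
  g_object_unref (session);

  /* returning TRUE keeps the one-second timeout running */
  return TRUE;
}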
static GstElement *
ges_track_video_transition_create_element (GESTrackObject * object)
{
  GstElement *topbin, *iconva, *iconvb, *scalea, *scaleb, *capsfilt, *oconv;
  GstObject *target = NULL;
  const gchar *propname = NULL;
  GstElement *mixer = NULL;
  GstPad *sinka_target, *sinkb_target, *src_target, *sinka, *sinkb, *src,
      *srca_pad;
  GESTrackVideoTransition *self;
  GESTrackVideoTransitionPrivate *priv;

  self = GES_TRACK_VIDEO_TRANSITION (object);
  priv = self->priv;

  GST_LOG ("creating a video bin");

  topbin = gst_bin_new ("transition-bin");
  iconva = gst_element_factory_make ("videoconvert", "tr-csp-a");
  iconvb = gst_element_factory_make ("videoconvert", "tr-csp-b");
  scalea = gst_element_factory_make ("videoscale", "vs-a");
  scaleb = gst_element_factory_make ("videoscale", "vs-b");
  capsfilt = gst_element_factory_make ("capsfilter", "capsfilt");
  oconv = gst_element_factory_make ("videoconvert", "tr-csp-output");

  gst_bin_add_many (GST_BIN (topbin), iconva, iconvb, scalea, scaleb, capsfilt,
      oconv, NULL);

  mixer = gst_element_factory_make ("videomixer", NULL);
  g_assert (mixer);
  g_object_set (G_OBJECT (mixer), "background", 1, NULL);
  gst_bin_add (GST_BIN (topbin), mixer);

  if (priv->pending_type != GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE) {
    priv->sinka =
        (GstPad *) link_element_to_mixer_with_smpte (GST_BIN (topbin), iconva,
        mixer, priv->pending_type, NULL);
    priv->sinkb =
        (GstPad *) link_element_to_mixer_with_smpte (GST_BIN (topbin), iconvb,
        mixer, priv->pending_type, &priv->smpte);
    target = GST_OBJECT (priv->smpte);
    propname = "position";
    priv->start_value = 1.0;
    priv->end_value = 0.0;
  } else {
    gst_element_link_pads_full (iconva, "src", scalea, "sink",
        GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full (iconvb, "src", scaleb, "sink",
        GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full (scaleb, "src", capsfilt, "sink",
        GST_PAD_LINK_CHECK_NOTHING);

    priv->sinka = (GstPad *) link_element_to_mixer (scalea, mixer);
    priv->sinkb = (GstPad *) link_element_to_mixer (capsfilt, mixer);
    target = GST_OBJECT (priv->sinkb);
    propname = "alpha";
    priv->start_value = 0.0;
    priv->end_value = 1.0;
  }

  priv->mixer = gst_object_ref (mixer);

  fast_element_link (mixer, oconv);

  sinka_target = gst_element_get_static_pad (iconva, "sink");
  sinkb_target = gst_element_get_static_pad (iconvb, "sink");
  src_target = gst_element_get_static_pad (oconv, "src");

  sinka = gst_ghost_pad_new ("sinka", sinka_target);
  sinkb = gst_ghost_pad_new ("sinkb", sinkb_target);
  src = gst_ghost_pad_new ("src", src_target);

  gst_element_add_pad (topbin, src);
  gst_element_add_pad (topbin, sinka);
  gst_element_add_pad (topbin, sinkb);

  srca_pad = gst_element_get_static_pad (scalea, "src");
  g_signal_connect (srca_pad, "notify::caps", G_CALLBACK (on_caps_set),
      (GstElement *) capsfilt);

  gst_object_unref (sinka_target);
  gst_object_unref (sinkb_target);
  gst_object_unref (src_target);
  gst_object_unref (srca_pad);

  /* set up interpolation */

  set_interpolation (target, priv, propname);

  priv->topbin = topbin;
  priv->type = priv->pending_type;

  return topbin;
}
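/* A hedged sketch, not the GES helper itself: set_interpolation() above is
 * private to GES, but the same effect -- ramping the mixer pad property from
 * start_value to end_value -- can be had with the stock GStreamer controller
 * API. The function name is illustrative; "alpha" matches the crossfade
 * branch above, whose property range is 0.0..1.0.
 */
#include <gst/controller/gstinterpolationcontrolsource.h>
#include <gst/controller/gstdirectcontrolbinding.h>

static void
animate_alpha_sketch (GstObject * target, GstClockTime duration)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();

  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (target,
      gst_direct_control_binding_new (target, "alpha", cs));

  /* linear ramp 0.0 -> 1.0 over the transition duration */
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0, 0.0);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      duration, 1.0);
  gst_object_unref (cs);
}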
void MediaImpl::gstPadAddedCallback(GstElement *src, GstPad *newPad, MediaImpl::GstPadHandlerData* data) {
  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (newPad), GST_ELEMENT_NAME (src));
  bool isAudio = false;
  GstPad *sinkPad = NULL;

  // Check the new pad's type.
  GstCaps *newPadCaps = gst_pad_query_caps (newPad, NULL);
  GstStructure *newPadStruct = gst_caps_get_structure (newPadCaps, 0);
  const gchar *newPadType   = gst_structure_get_name (newPadStruct);
  g_print("Structure is %s\n", gst_structure_to_string(newPadStruct));
  if (g_str_has_prefix (newPadType, "audio/x-raw"))
  {
    sinkPad = gst_element_get_static_pad (data->audioToConnect, "sink");
    isAudio = true;
  }
  else if (g_str_has_prefix (newPadType, "video/x-raw"))
  {
    sinkPad = gst_element_get_static_pad (data->videoToConnect, "sink");
    isAudio = false;
  }
  else
  {
    g_print ("  It has type '%s' which is not raw audio/video. Ignoring.\n", newPadType);
    goto exit;
  }

  // If our converter is already linked, we have nothing to do here.
  if (gst_pad_is_linked (sinkPad))
  {
    // Prefer certain raw formats: if the new pad offers one, drop the
    // existing link in its favour.
    if (g_str_has_prefix (newPadType, "audio/x-raw-float") ||
        g_str_has_prefix (newPadType, "video/x-raw-int") )
    {
      g_print ("  Found a better pad.\n");
      GstPad* oldPad = gst_pad_get_peer(sinkPad);
      gst_pad_unlink(oldPad, sinkPad);
      gst_object_unref(oldPad);
    }
    else
    {
      g_print ("  We are already linked. Ignoring.\n");
      goto exit;
    }
  }

  // Attempt the link
  if (GST_PAD_LINK_FAILED (gst_pad_link (newPad, sinkPad))) {
    g_print ("  Type is '%s' but link failed.\n", newPadType);
    goto exit;
  } else {
    g_print ("  Link succeeded (type '%s').\n", newPadType);
    if (isAudio)
    {
      //data->audioIsConnected = true;
    }
    else
    {
      data->videoIsConnected = true;
    }
  }

exit:
  // Unreference the new pad's caps, if we got them.
  if (newPadCaps != NULL)
    gst_caps_unref (newPadCaps);

  // Unreference the sink pad.
  if (sinkPad != NULL)
    gst_object_unref (sinkPad);
}
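// A hedged sketch of how a handler like the one above is typically attached
// (assuming gstPadAddedCallback is a static member, as its C-style signature
// suggests; the "uridecodebin" element choice is illustrative):
void connectPadAddedSketch(GstElement *decoder,
                           MediaImpl::GstPadHandlerData *data)
{
  // uridecodebin emits "pad-added" once per elementary stream as soon as its
  // caps are known, which is why the handler inspects caps before linking
  g_signal_connect (decoder, "pad-added",
      G_CALLBACK (MediaImpl::gstPadAddedCallback), data);
}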
static GstElement *
gst_auto_video_src_find_best (GstAutoVideoSrc * src)
{
  GList *list, *item;
  GstElement *choice = NULL;
  GstMessage *message = NULL;
  GSList *errors = NULL;
  GstBus *bus = gst_bus_new ();
  GstPad *el_pad = NULL;
  GstCaps *el_caps = NULL;
  gboolean no_match = TRUE;

  list = gst_registry_feature_filter (gst_registry_get (),
      (GstPluginFeatureFilter) gst_auto_video_src_factory_filter, FALSE, src);
  list = g_list_sort (list, (GCompareFunc) gst_auto_video_src_compare_ranks);

  GST_LOG_OBJECT (src, "Trying to find usable video devices ...");

  for (item = list; item != NULL; item = item->next) {
    GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
    GstElement *el;

    if ((el = gst_auto_video_src_create_element_with_pretty_name (src, f))) {
      GstStateChangeReturn ret;

      GST_DEBUG_OBJECT (src, "Testing %s", GST_OBJECT_NAME (f));

      /* If AutoVideoSrc has been provided with filter caps,
       * accept only sources that match with the filter caps */
      if (src->filter_caps) {
        el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
        el_caps = gst_pad_query_caps (el_pad, NULL);
        gst_object_unref (el_pad);
        GST_DEBUG_OBJECT (src,
            "Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
            src->filter_caps, el_caps);
        no_match = !gst_caps_can_intersect (src->filter_caps, el_caps);
        gst_caps_unref (el_caps);

        if (no_match) {
          GST_DEBUG_OBJECT (src, "Incompatible caps");
          gst_object_unref (el);
          continue;
        } else {
          GST_DEBUG_OBJECT (src, "Found compatible caps");
        }
      }

      gst_element_set_bus (el, bus);
      ret = gst_element_set_state (el, GST_STATE_READY);
      if (ret == GST_STATE_CHANGE_SUCCESS) {
        GST_DEBUG_OBJECT (src, "This worked!");
        choice = el;
        break;
      }

      /* collect all error messages */
      while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
        GST_DEBUG_OBJECT (src, "error message %" GST_PTR_FORMAT, message);
        errors = g_slist_append (errors, message);
      }

      gst_element_set_state (el, GST_STATE_NULL);
      gst_object_unref (el);
    }
  }

  GST_DEBUG_OBJECT (src, "done trying");
  if (!choice) {
    if (errors) {
      /* FIXME: we forward the first error for now; but later on it might make
       * sense to actually analyse them */
      gst_message_ref (GST_MESSAGE (errors->data));
      GST_DEBUG_OBJECT (src, "reposting message %p", errors->data);
      gst_element_post_message (GST_ELEMENT (src), GST_MESSAGE (errors->data));
    } else {
      /* send warning message to application and use a fakesrc */
      GST_ELEMENT_WARNING (src, RESOURCE, NOT_FOUND, (NULL),
          ("Failed to find a usable video source"));
      choice = gst_element_factory_make ("fakesrc", "fake-video-src");
      if (g_object_class_find_property (G_OBJECT_GET_CLASS (choice), "sync"))
        g_object_set (choice, "sync", TRUE, NULL);
      gst_element_set_state (choice, GST_STATE_READY);
    }
  }
  gst_object_unref (bus);
  gst_plugin_feature_list_free (list);
  g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
  g_slist_free (errors);

  return choice;
}
static void
on_pad_added (GstElement  *element,
              GstPad      *pad,
              gpointer    data)
{
  GstElement *mybin, *rtp, *conv, *resample;
  GstElement *alaw = NULL;
  GstPad *sinkpad, *mypad;
  GstElement *liveadder = (GstElement *) data;

  mybin = gst_bin_new (NULL);
  if (g711) {
    rtp = gst_element_factory_make ("rtppcmadepay", NULL);
    alaw = gst_element_factory_make ("alawdec", NULL);
    if (!alaw) {
      g_printerr ("on_pad_added; One element could not be created.\n");
      return;
    } else {
      if (!gst_bin_add (GST_BIN (mybin), alaw)) {
        g_printerr ("on_pad_added; One element could not be added to pipeline.\n");
        return;
      }
    }
  } else {
    rtp = gst_element_factory_make ("rtpL16depay", NULL);
  }

  //  rganalyse = gst_element_factory_make ("rganalysis", NULL);
  conv = gst_element_factory_make ("audioconvert", NULL);
  resample = gst_element_factory_make ("audioresample", NULL);

  if (!rtp || !conv || !resample /* || !rganalyse */ || !mybin) {
    g_printerr ("on_pad_added; One element could not be created.\n");
    return;
  }
  gst_bin_add_many (GST_BIN (mybin), rtp, conv, resample, /* rganalyse, */ NULL);
  if (!gst_bin_add(GST_BIN (pipeline), mybin))
  {
    g_printerr ("on_pad_added; mybin can not be added to pipeline.\n");
    return;
  }
  //  g_object_set (G_OBJECT(rganalyse), "message", TRUE, NULL);

  if (g711) {
    if (!gst_element_link_many (rtp, alaw, conv, resample, /*rganalyse,*/ NULL)) {
      g_warning ("on_pad_added: Failed to link elements!");
      return;
    }
  } else {
    if (!gst_element_link_many (rtp, conv, resample, /*rganalyse,*/ NULL)) {
      g_warning ("on_pad_added: Failed to link elements!");
      return;
    }
  }

  // create sink and source ghostpads on mybin
  mypad = gst_element_get_static_pad (rtp, "sink");
  if (!mypad) {
    g_warning ("Failed to get sink pad from rtp!");
    return;
  } else {
    if (!gst_element_add_pad (mybin, gst_ghost_pad_new ("sink", mypad))) {
      g_warning ("Failed to add ghost sinkpad to bin!");
    }
    gst_object_unref(mypad);
  }

  mypad = gst_element_get_static_pad (resample, "src");
  if (!mypad) {
    g_warning ("Failed to get source pad from resample!");
    return;
  } else {
    if (!gst_element_add_pad (mybin, gst_ghost_pad_new ("src", mypad))) {
      g_warning ("Failed to add ghost sourcepad to bin!");
    }
    gst_object_unref(mypad);
  }
  
  //link bin to the liveadder
  if (!gst_element_link(mybin, liveadder)) {
    g_warning ("Failed to link bin to liveadder!");
    return;
  }

  // link dynamically created rtpbin pad to sinkpad of mybin
  sinkpad = gst_element_get_static_pad (mybin, "sink");	// get the sink pad from bin
  if (!gst_pad_is_linked (sinkpad)) {
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) {
      g_error ("Failed to link pads!");
    }
  } else {     
    g_warning ("gstrtppad is already linked!\n");
  }
  gst_object_unref (sinkpad);

  // When the RTP stream of this new pad stops, this pad will be unlinked. At
  // that moment the parent of the peer pad (the bin we just connected) can be
  // removed from the pipeline.
  g_signal_connect (pad, "unlinked", G_CALLBACK (on_pad_removed), NULL);  

  // Set new elements to PAUSED
  gst_element_set_state (mybin, GST_STATE_PAUSED);
}
/*
 * The following chain is created after the tee for each output from the
 * source:
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_media_source_request_source_default(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrMediaType media_type;
    GstElement *source_pipeline, *tee;
    GstElement *source_bin, *source = NULL, *queue_pre, *queue_post;
    GstElement *capsfilter;
    GstElement *sink, *sink_queue, *sink_bin;
    GstPad *bin_pad = NULL, *srcpad, *sinkpad;
    gchar *bin_name;
    guint source_id;
    gchar *channel_name;

    g_return_val_if_fail(media_source->priv->source_bin, NULL);
    g_return_val_if_fail(media_source->priv->source_tee, NULL);

    source_pipeline = gst_object_ref(media_source->priv->source_bin);
    tee = gst_object_ref(media_source->priv->source_tee);

    source_id = g_atomic_int_add(&unique_bin_id, 1);

    bin_name = g_strdup_printf("source-bin-%u", source_id);
    source_bin = gst_bin_new(bin_name);
    g_free(bin_name);

    CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-queue", source_id);
    CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id);
    CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id);

    CREATE_ELEMENT_WITH_ID(sink_queue, "queue", "sink-queue", source_id);

    g_object_get(media_source, "media-type", &media_type, NULL);
    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        {
        GstElement *audioresample, *audioconvert;

        CREATE_ELEMENT_WITH_ID(source, "interaudiosrc", "source", source_id);
        CREATE_ELEMENT_WITH_ID(sink, "interaudiosink", "sink", source_id);

        g_object_set(capsfilter, "caps", caps, NULL);

        CREATE_ELEMENT_WITH_ID(audioresample, "audioresample", "source-audio-resample", source_id);
        CREATE_ELEMENT_WITH_ID(audioconvert, "audioconvert", "source-audio-convert", source_id);

        gst_bin_add_many(GST_BIN(source_bin),
            queue_pre, audioconvert, audioresample, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(audioresample, capsfilter);
        LINK_ELEMENTS(audioconvert, audioresample);
        LINK_ELEMENTS(queue_pre, audioconvert);

        break;
        }
    case OWR_MEDIA_TYPE_VIDEO:
        {
        GstElement *videoscale, *videoconvert;

        CREATE_ELEMENT_WITH_ID(source, "intervideosrc", "source", source_id);
        CREATE_ELEMENT_WITH_ID(sink, "intervideosink", "sink", source_id);

        srcpad = gst_element_get_static_pad(source, "src");
        gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BUFFER, drop_gap_buffers, NULL, NULL);
        gst_object_unref(srcpad);

        g_object_set(capsfilter, "caps", caps, NULL);

        CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id);
        CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id);

        gst_bin_add_many(GST_BIN(source_bin),
            queue_pre, videoscale, videoconvert, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(videoconvert, capsfilter);
        LINK_ELEMENTS(videoscale, videoconvert);
        LINK_ELEMENTS(queue_pre, videoscale);

        break;
        }
    case OWR_MEDIA_TYPE_UNKNOWN:
    default:
        g_assert_not_reached();
        goto done;
    }

    channel_name = g_strdup_printf("source-%u", source_id);
    g_object_set(source, "channel", channel_name, NULL);
    g_object_set(sink, "channel", channel_name, NULL);
    g_free(channel_name);

    /* Add and link the inter*sink to the actual source pipeline */
    bin_name = g_strdup_printf("source-sink-bin-%u", source_id);
    sink_bin = gst_bin_new(bin_name);
    g_free(bin_name);
    gst_bin_add_many(GST_BIN(sink_bin), sink, sink_queue, NULL);
    gst_element_sync_state_with_parent(sink);
    gst_element_sync_state_with_parent(sink_queue);
    LINK_ELEMENTS(sink_queue, sink);
    sinkpad = gst_element_get_static_pad(sink_queue, "sink");
    bin_pad = gst_ghost_pad_new("sink", sinkpad);
    gst_object_unref(sinkpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(sink_bin, bin_pad);
    bin_pad = NULL;
    gst_bin_add(GST_BIN(source_pipeline), sink_bin);
    gst_element_sync_state_with_parent(sink_bin);
    LINK_ELEMENTS(tee, sink_bin);

    /* Start up our new bin and link it all */
    srcpad = gst_element_get_static_pad(queue_post, "src");
    g_assert(srcpad);

    bin_pad = gst_ghost_pad_new("src", srcpad);
    gst_object_unref(srcpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(source_bin, bin_pad);

    gst_bin_add(GST_BIN(source_bin), source);
    LINK_ELEMENTS(source, queue_pre);

done:

    gst_object_unref(source_pipeline);
    gst_object_unref(tee);

    return source_bin;
}
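/* drop_gap_buffers() is not shown in this listing; a hedged sketch of what a
 * buffer probe that drops GAP buffers plausibly looks like: */
static GstPadProbeReturn
drop_gap_buffers_sketch (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);

  /* swallow gap (silence/black filler) buffers, pass everything else */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_GAP))
    return GST_PAD_PROBE_DROP;

  return GST_PAD_PROBE_OK;
}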
/**
 * Tries to guess the frame rate for a V4L2 source.
 */
std::string Pipeline::guess_source_caps(unsigned int framerateIndex) const
{
    bool is_verbose = owner_->get_configuration()->get_verbose();
    if (is_verbose)
        LOG_DEBUG("Trying to guess source FPS " << framerateIndex);

    std::ostringstream capsStr;
    GstStateChangeReturn ret = gst_element_set_state(videosrc_, GST_STATE_READY);
    if (ret not_eq GST_STATE_CHANGE_SUCCESS)
        THROW_ERROR("Could not change v4l2src state to READY");
    GstPad *srcPad = gst_element_get_static_pad(videosrc_, "src");
    GstCaps *caps = gst_pad_get_caps(srcPad);
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    const GValue *val = gst_structure_get_value(structure, "framerate");
    if (is_verbose)
    {
        gchar *structStr = gst_structure_to_string(structure);
        LOG_DEBUG("Caps structure from v4l2src srcpad: " << structStr);
        g_free(structStr); // gst_structure_to_string() allocates a new string
    }
    gint framerate_numerator = 1;
    gint framerate_denominator = 1; 
    if (GST_VALUE_HOLDS_LIST(val))
    {
        // The framerate field holds a list of supported fractions; use the
        // entry at framerateIndex.
        if (framerateIndex >= gst_value_list_get_size(val))
            THROW_ERROR("Framerate index out of range");
        framerate_numerator = gst_value_get_fraction_numerator((gst_value_list_get_value(val, framerateIndex)));
        framerate_denominator = gst_value_get_fraction_denominator((gst_value_list_get_value(val, framerateIndex)));
    }
    else
    {
        // FIXME: this is really bad, we should be iterating over framerates and resolutions until we find a good one
        if (framerateIndex > 0)
            LOG_ERROR("Caps parameters haven't been changed and have failed before");
        framerate_numerator = gst_value_get_fraction_numerator(val);
        framerate_denominator = gst_value_get_fraction_denominator(val);
    }

    gst_caps_unref(caps);
    gst_object_unref(srcPad);

    // Build the framerate caps fragment as "numerator/denominator"
    std::string capsSuffix = boost::lexical_cast<std::string>(framerate_numerator);
    capsSuffix += "/";
    capsSuffix += boost::lexical_cast<std::string>(framerate_denominator);

    // TODO: handle interlaced video capture stream
    if (v4l2util::isInterlaced(owner_->get_configuration()->videoSource()))
    {
        capsSuffix += ", interlaced=true";
    }

    // TODO: handle aspect ratio
    // capsSuffix += ", pixel-aspect-ratio=1/1";
    // capsSuffix += config_.pixelAspectRatio();
    // capsSuffix += "4:3";

    Configuration *config = owner_->get_configuration();
    capsStr << "video/x-raw-yuv, width=" 
        << config->get_capture_width() 
        << ", height="
        << config->get_capture_height()
        << ", framerate="
        << capsSuffix;
    if (is_verbose)
        LOG_DEBUG("Video source caps are " << capsStr.str());
    ret = gst_element_set_state(videosrc_, GST_STATE_NULL);
    if (ret not_eq GST_STATE_CHANGE_SUCCESS)
        THROW_ERROR("Could not change v4l2src state to NULL");
    return capsStr.str();
}
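// A hedged aside: when the "framerate" field holds a single fraction rather
// than a list, GStreamer can read it directly, avoiding the manual
// numerator/denominator extraction above. A minimal sketch:
static bool readFramerateSketch(const GstStructure *structure,
                                gint *num, gint *denom)
{
    // returns false if the field is absent or not a plain fraction
    return gst_structure_get_fraction(structure, "framerate", num, denom);
}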
static GstPad *
empathy_audio_sink_request_new_pad (GstElement *element,
  GstPadTemplate *templ,
  const gchar* name)
{
  EmpathyGstAudioSink *self = EMPATHY_GST_AUDIO_SINK (element);
  GstElement *bin, *resample, *audioconvert0, *audioconvert1;
  GstPad *pad = NULL;
  GstPad *subpad, *filterpad;

  bin = gst_bin_new (NULL);

  audioconvert0 = gst_element_factory_make ("audioconvert", NULL);
  if (audioconvert0 == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), audioconvert0);

  resample = gst_element_factory_make ("audioresample", NULL);
  if (resample == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), resample);

  audioconvert1 = gst_element_factory_make ("audioconvert", NULL);
  if (audioconvert1 == NULL)
    goto error;

  gst_bin_add (GST_BIN (bin), audioconvert1);

  self->priv->sink = create_sink (self);
  if (self->priv->sink == NULL)
    goto error;

  if (GST_IS_STREAM_VOLUME (self->priv->sink))
    {
      g_static_mutex_lock (&self->priv->volume_mutex);
      if (self->priv->volume_idle_id == 0)
        self->priv->volume_idle_id = g_idle_add (
          empathy_audio_sink_volume_idle_setup, self);
      g_static_mutex_unlock (&self->priv->volume_mutex);
    }
  else
    {
      gchar *n = gst_element_get_name (self->priv->sink);

      DEBUG ("Element %s doesn't support volume", n);
      g_free (n);
    }

  gst_bin_add (GST_BIN (bin), self->priv->sink);

  if (!gst_element_link_many (audioconvert0, resample, audioconvert1,
      self->priv->sink, NULL))
    goto error;

  filterpad = gst_element_get_static_pad (audioconvert0, "sink");

  if (filterpad == NULL)
    goto error;

  subpad = gst_ghost_pad_new ("sink", filterpad);
  if (!gst_element_add_pad (GST_ELEMENT (bin), subpad))
    goto error;

  gst_bin_add (GST_BIN (self), bin);

  pad = gst_ghost_pad_new (name, subpad);
  g_assert (pad != NULL);

  if (!gst_element_sync_state_with_parent (bin))
    goto error;

  if (!gst_pad_set_active (pad, TRUE))
    goto error;

  if (!gst_element_add_pad (GST_ELEMENT (self), pad))
    goto error;

  return pad;

error:
  if (pad != NULL)
    {
      gst_object_unref (pad);
    }

  gst_object_unref (bin);
  g_warning ("Failed to create output subpipeline");
  return NULL;
}
static void
empathy_audio_src_init (EmpathyGstAudioSrc *obj)
{
  EmpathyGstAudioSrcPrivate *priv = EMPATHY_GST_AUDIO_SRC_GET_PRIVATE (obj);
  GstPad *ghost, *src;

  obj->priv = priv;
  g_mutex_init (&priv->lock);

  priv->volume = 1.0;

  priv->src = create_src ();
  if (priv->src == NULL)
    return;

  if (GST_IS_STREAM_VOLUME (priv->src))
    {
      gdouble volume;
      gboolean mute;

      priv->have_stream_volume = TRUE;
      /* We can't do a bidirectional bind as the ::notify comes from another
       * thread; for other bits of empathy it's simplest if it comes from
       * the main thread */
      g_object_bind_property (obj, "volume", priv->src, "volume",
        G_BINDING_DEFAULT);
      g_object_bind_property (obj, "mute", priv->src, "mute",
        G_BINDING_DEFAULT);

      /* sync and callback for bouncing */
      g_object_get (priv->src, "volume", &volume, NULL);
      g_object_set (obj, "volume", volume, NULL);

      g_object_get (priv->src, "mute", &mute, NULL);
      g_object_set (obj, "mute", mute, NULL);

      g_signal_connect (priv->src, "notify::volume",
        G_CALLBACK (empathy_audio_src_volume_changed), obj);
      g_signal_connect (priv->src, "notify::mute",
        G_CALLBACK (empathy_audio_src_volume_changed), obj);
    }
  else
    {
      g_message ("No stream volume available :(, mute will work though");
      priv->have_stream_volume = FALSE;
    }

  gst_bin_add (GST_BIN (obj), priv->src);

  priv->volume_element = gst_element_factory_make ("volume", NULL);
  gst_bin_add (GST_BIN (obj), priv->volume_element);

  {
    GstElement *capsfilter;
    GstCaps *caps;

    /* Explicitly state what format we want from pulsesrc. This pushes resampling
     * and format conversion as early as possible, lowering the amount of data
     * transferred and thus improving performance. When moving to GStreamer
     * 0.11/1.0, this should change so that we actually request what the encoder
     * wants downstream. */
    caps = gst_caps_new_simple ("audio/x-raw-int",
        "channels", G_TYPE_INT, 1,
        "width", G_TYPE_INT, 16,
        "depth", G_TYPE_INT, 16,
        "rate", G_TYPE_INT, 32000,
        NULL);
    capsfilter = gst_element_factory_make ("capsfilter", NULL);
    g_object_set (G_OBJECT (capsfilter), "caps", caps, NULL);
    gst_bin_add (GST_BIN (obj), capsfilter);
    gst_element_link (priv->src, capsfilter);
    gst_element_link (capsfilter, priv->volume_element);
  }

  src = gst_element_get_static_pad (priv->volume_element, "src");

  ghost = gst_ghost_pad_new ("src", src);
  gst_element_add_pad (GST_ELEMENT (obj), ghost);

  gst_object_unref (G_OBJECT (src));

  /* Listen to changes to GstPulseSrc:source-output-index so we know when
   * it's no longer PA_INVALID_INDEX (starting for the first time) or if it
   * changes (READY->NULL->READY...) */
  g_signal_connect (priv->src, "notify::source-output-index",
      G_CALLBACK (empathy_audio_src_source_output_index_notify),
      obj);

  priv->mic_monitor = empathy_mic_monitor_new ();
  g_signal_connect (priv->mic_monitor, "microphone-changed",
      G_CALLBACK (empathy_audio_src_microphone_changed_cb), obj);

  priv->source_idx = PA_INVALID_INDEX;
}
// This function creates and initializes some internal variables, and returns a
// pointer to the element that should receive the data flow first
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink(GstElement* pipeline)
{
    if (!initializeGStreamer())
        return 0;

#if USE(NATIVE_FULLSCREEN_VIDEO)
    m_gstGWorld = GStreamerGWorld::createGWorld(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew(m_gstGWorld.get());
#else
    UNUSED_PARAM(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew();
#endif
    m_videoSinkPad = adoptGRef(gst_element_get_static_pad(m_webkitVideoSink.get(), "sink"));

    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Build a new video sink consisting of a bin containing a tee
    // (meant to distribute data to multiple video sinks) and our
    // internal video sink. For fullscreen we create an autovideosink
    // and initially block the data flow towards it and configure it

    m_videoSinkBin = gst_bin_new("video-sink");

    GstElement* videoTee = gst_element_factory_make("tee", "videoTee");
    GstElement* queue = gst_element_factory_make("queue", 0);

#ifdef GST_API_VERSION_1
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    GST_OBJECT_FLAG_SET(GST_OBJECT(sinkPad.get()), GST_PAD_FLAG_PROXY_ALLOCATION);
#endif

    gst_bin_add_many(GST_BIN(m_videoSinkBin.get()), videoTee, queue, NULL);

    // Link a new src pad from tee to queue1.
    gst_element_link_pads_full(videoTee, 0, queue, "sink", GST_PAD_LINK_CHECK_NOTHING);
#endif

    GstElement* actualVideoSink = 0;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        // The verbose property has been added in -bad 0.10.22. Make this
        // whole code path depend on it because we don't want fpsdisplaysink
        // to spit data on stdout.
        GstElementFactory* factory = GST_ELEMENT_FACTORY(GST_ELEMENT_GET_CLASS(m_fpsSink)->elementfactory);
        if (gst_plugin_feature_check_version(GST_PLUGIN_FEATURE(factory), 0, 10, 22)) {
            g_object_set(m_fpsSink, "silent", TRUE , NULL);

            // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
            g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#else
            WTFLogChannel* channel = getChannelFromName("Media");
            if (channel->state != WTFLogChannelOn)
                g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#endif // LOG_DISABLED

            if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink), "video-sink")) {
                g_object_set(m_fpsSink, "video-sink", m_webkitVideoSink.get(), NULL);
#if USE(NATIVE_FULLSCREEN_VIDEO)
                gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_fpsSink);
#endif
                actualVideoSink = m_fpsSink;
            } else
                m_fpsSink = 0;
        } else
            m_fpsSink = 0;
    }

    if (!m_fpsSink) {
#if USE(NATIVE_FULLSCREEN_VIDEO)
        gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_webkitVideoSink.get());
#endif
        actualVideoSink = m_webkitVideoSink.get();
    }

    ASSERT(actualVideoSink);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", actualVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Add a ghostpad to the bin so it can proxy to tee.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    gst_element_add_pad(m_videoSinkBin.get(), gst_ghost_pad_new("sink", pad.get()));

    // Set the bin as video sink of playbin.
    return m_videoSinkBin.get();
#else
    return actualVideoSink;
#endif
}
gint client_video_stream(int rtp_src,int rtcp_src,int rtcp_sink)
{
  GstElement *rtpbin;
  GstElement *rtpvsrc, *rtcpvsrc, *rtcpvsink;

  GstElement  *videodec, *videosink;  
  GstElement *queue;

  GstCaps *caps;
  gboolean res;
  GstPadLinkReturn lres;
  GstPad *srcpad, *sinkpad;

  RTP_SRC_V = rtp_src;
  RTCP_SRC_V = rtcp_src;
  RTCP_SINK_V = rtcp_sink;

  pipelineVC = gst_pipeline_new ("Client");
  g_assert (pipelineVC);

  rtpvsrc = gst_element_factory_make ("udpsrc", "rtpvsrc");
  g_assert (rtpvsrc);
  g_object_set (rtpvsrc, "port", RTP_SRC_V, NULL);
  caps = gst_caps_from_string (VIDEO_CAPS);
  g_object_set (rtpvsrc, "caps", caps, NULL);
  gst_caps_unref (caps);

  rtcpvsrc = gst_element_factory_make ("udpsrc", "rtcpvsrc");
  g_assert (rtcpvsrc);
  g_object_set (rtcpvsrc, "port", RTCP_SRC_V, NULL);

  rtcpvsink = gst_element_factory_make ("udpsink", "rtcpvsink");
  g_assert (rtcpvsink);
  g_object_set (rtcpvsink, "port", RTCP_SINK_V, "host", DEST_HOST, NULL);
  g_object_set (rtcpvsink, "async", FALSE, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipelineVC), rtpvsrc, rtcpvsrc, rtcpvsink, NULL);

  // Video: depayload, decode and display
  videodepay = gst_element_factory_make ("rtpvp8depay", "videodepay");
  g_assert (videodepay);
  videodec = gst_element_factory_make ("vp8dec", "videodec");
  g_assert (videodec);
  videosink = gst_element_factory_make ("xvimagesink", "videosink");
  g_assert (videosink);
  g_object_set (videosink, "sync", FALSE, NULL);
  queue = gst_element_factory_make ("queue", "queue");
  g_assert (queue);

  /* add depayloading and playback to the pipelineVC and link */
  gst_bin_add_many (GST_BIN (pipelineVC), videodepay, queue, videodec, videosink, NULL);

  res = gst_element_link_many (videodepay, queue, videodec, videosink, NULL);
  g_assert (res == TRUE);

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
  g_assert (rtpbin);
  g_object_set (rtpbin, "latency", 10, NULL);

  gst_bin_add (GST_BIN (pipelineVC), rtpbin);

  // Video: link the RTP udpsrc to the rtpbin receive pad for session 1
  srcpad = gst_element_get_static_pad (rtpvsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_1");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);

  // RTCP sinkpad in session 1 
  srcpad = gst_element_get_static_pad (rtcpvsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_1");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  // RTCP srcpad for sending RTCP 
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_1");
  sinkpad = gst_element_get_static_pad (rtcpvsink, "sink");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

  // Direct the xvideo output to the drawing area
  if (GST_IS_X_OVERLAY (videosink))
  {
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (videosink),
        GPOINTER_TO_UINT (GINT_TO_POINTER (GDK_WINDOW_XWINDOW (remote_video->window))));
  }

  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), NULL);

  g_print ("starting receiver pipelineVC\n");

  return 0;
}
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);
  GstElement *colorspace, *videoscale, *sink;
  GstPad *pad;

  g_signal_connect (object, "realize",
      G_CALLBACK (empathy_video_widget_realized), NULL);

  priv->videosink = gst_bin_new (NULL);

  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  /* the sink ghostpad is created further down, once the bin is populated;
   * a freshly created empty bin has no static "sink" pad to fetch here */
  sink = gst_element_factory_make ("gconfvideosink", NULL);
  g_assert (sink != NULL);

  videoscale = gst_element_factory_make ("videoscale", NULL);
  g_assert (videoscale != NULL);

  g_object_set (videoscale, "qos", FALSE, NULL);

  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  g_assert (colorspace != NULL);

  g_object_set (colorspace, "qos", FALSE, NULL);

  priv->flip = gst_element_factory_make ("videoflip", NULL);
  g_assert (priv->flip != NULL);

  gst_bin_add_many (GST_BIN (priv->videosink), colorspace, videoscale,
    priv->flip, sink, NULL);

  if (!gst_element_link (colorspace, videoscale))
    g_error ("Failed to link ffmpegcolorspace and videoscale");

  if (!gst_element_link (videoscale, priv->flip))
    g_error ("Failed to link videoscale and videoflip");

  if (!gst_element_link (priv->flip, sink))
    g_error ("Failed to link videoflip and gconfvideosink");

  pad = gst_element_get_static_pad (colorspace, "sink");
  g_assert (pad != NULL);

  priv->sink_pad = gst_ghost_pad_new ("sink", pad);
  if (!gst_element_add_pad  (priv->videosink, priv->sink_pad))
    g_error ("Couldn't add sink ghostpad to the bin");

  gst_object_unref (pad);

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));
  gst_bus_enable_sync_message_emission (priv->bus);

  g_signal_connect (priv->bus, "sync-message",
    G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
    priv->min_height);
}
static void
gst_camera_bin_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCameraBin *camera = GST_CAMERA_BIN_CAST (object);

  switch (prop_id) {
    case PROP_MODE:
      gst_camera_bin_change_mode (camera, g_value_get_enum (value));
      break;
    case PROP_LOCATION:
      gst_camera_bin_set_location (camera, g_value_get_string (value));
      break;
    case PROP_CAMERA_SRC:
      gst_camera_bin_set_camera_src (camera, g_value_get_object (value));
      break;
    case PROP_IMAGE_CAPTURE_CAPS:{
      GstPad *pad = NULL;

      if (camera->src)
        pad =
            gst_element_get_static_pad (camera->src,
            GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME);

      GST_DEBUG_OBJECT (camera,
          "Setting image capture caps to %" GST_PTR_FORMAT,
          gst_value_get_caps (value));

      /* set the capsfilter caps and notify the src to renegotiate */
      g_object_set (camera->imagebin_capsfilter, "caps",
          gst_value_get_caps (value), NULL);
      if (pad) {
        GST_DEBUG_OBJECT (camera, "Pushing renegotiate on %s",
            GST_PAD_NAME (pad));
        GST_PAD_EVENTFUNC (pad) (pad, gst_camera_bin_new_event_renegotiate ());
        gst_object_unref (pad);
      }
    }
      break;
    case PROP_VIDEO_CAPTURE_CAPS:{
      GstPad *pad = NULL;

      if (camera->src)
        pad =
            gst_element_get_static_pad (camera->src,
            GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME);

      GST_DEBUG_OBJECT (camera,
          "Setting video capture caps to %" GST_PTR_FORMAT,
          gst_value_get_caps (value));

      /* set the capsfilter caps and notify the src to renegotiate */
      g_object_set (camera->videobin_capsfilter, "caps",
          gst_value_get_caps (value), NULL);
      if (pad) {
        GST_DEBUG_OBJECT (camera, "Pushing renegotiate on %s",
            GST_PAD_NAME (pad));
        GST_PAD_EVENTFUNC (pad) (pad, gst_camera_bin_new_event_renegotiate ());
        gst_object_unref (pad);
      }
    }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/*
 * Runs the RTP pipeline.
 * @param p Pointer to the RTP pipeline.
 */
static void
rtp_pipeline_run (rtp_pipeline * p)
{
  GstFlowReturn flow_ret;
  GMainLoop *mainloop = NULL;
  GstBus *bus;
  gint i, j;

  /* Check parameters. */
  if (p == NULL) {
    return;
  }

  /* Create mainloop. */
  mainloop = g_main_loop_new (NULL, FALSE);
  if (!mainloop) {
    return;
  }

  /* Add bus callback. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (p->pipeline));

  gst_bus_add_watch (bus, rtp_bus_callback, (gpointer) mainloop);
  gst_object_unref (bus);

  /* Set pipeline to PLAYING. */
  gst_element_set_state (p->pipeline, GST_STATE_PLAYING);

  /* Push custom event into the pipeline */
  if (p->custom_event) {
    GstPad *srcpad;

    /* Install a probe to drop the event after it being serialized */
    srcpad = gst_element_get_static_pad (p->rtppay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        pay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);

    /* Install a probe to trace the deserialized event after depayloading */
    srcpad = gst_element_get_static_pad (p->rtpdepay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        depay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);
    /* Send the event */
    gst_element_send_event (p->appsrc, gst_event_ref (p->custom_event));
  }

  /* Push data into the pipeline */
  for (i = 0; i < LOOP_COUNT; i++) {
    const guint8 *data = p->frame_data;

    for (j = 0; j < p->frame_count; j++) {
      GstBuffer *buf;

      buf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          (guint8 *) data, p->frame_data_size, 0, p->frame_data_size, NULL,
          NULL);

      g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
      fail_unless_equals_int (flow_ret, GST_FLOW_OK);
      data += p->frame_data_size;

      gst_buffer_unref (buf);
    }
  }

  g_signal_emit_by_name (p->appsrc, "end-of-stream", &flow_ret);

  /* Run mainloop. */
  g_main_loop_run (mainloop);

  /* Set pipeline to NULL. */
  gst_element_set_state (p->pipeline, GST_STATE_NULL);

  /* Release mainloop. */
  g_main_loop_unref (mainloop);

  fail_if (p->custom_event);
}
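/* A hedged aside on buffer ownership: the "push-buffer" action signal used
 * above does not take ownership of the buffer, hence the gst_buffer_unref()
 * after each emission. The direct gst-app call is equivalent but steals the
 * reference: */
#include <gst/app/gstappsrc.h>

static GstFlowReturn
push_one_buffer_sketch (GstAppSrc * appsrc, GstBuffer * buf)
{
  /* gst_app_src_push_buffer() takes ownership of buf; no unref afterwards */
  return gst_app_src_push_buffer (appsrc, buf);
}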
static void
gst_camera_bin_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstCameraBin *camera = GST_CAMERA_BIN_CAST (object);

  switch (prop_id) {
    case PROP_MODE:
      g_value_set_enum (value, camera->mode);
      break;
    case PROP_LOCATION:
      if (camera->mode == MODE_VIDEO) {
        g_value_set_string (value, camera->video_location);
      } else {
        g_value_set_string (value, camera->image_location);
      }
      break;
    case PROP_CAMERA_SRC:
      g_value_set_object (value, camera->src);
      break;
    case PROP_VIDEO_CAPTURE_SUPPORTED_CAPS:
    case PROP_IMAGE_CAPTURE_SUPPORTED_CAPS:{
      GstPad *pad;
      GstCaps *caps;
      const gchar *padname;

      if (prop_id == PROP_VIDEO_CAPTURE_SUPPORTED_CAPS) {
        padname = GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME;
      } else {
        padname = GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME;
      }

      if (camera->src) {
        pad = gst_element_get_static_pad (camera->src, padname);

        g_assert (pad != NULL);

        /* TODO not sure if we want get_caps or get_allowed_caps to already
         * consider the full pipeline scenario and avoid picking a caps that
         * won't negotiate. Need to take care on the special case of the
         * pad being unlinked.
         */
        caps = gst_pad_get_caps_reffed (pad);
        if (caps) {
          gst_value_set_caps (value, caps);
          gst_caps_unref (caps);
        }

        gst_object_unref (pad);
      } else {
        GST_DEBUG_OBJECT (camera, "Camera source not created, can't get "
            "supported caps");
      }
    }
      break;
    case PROP_IMAGE_CAPTURE_CAPS:{
      GstCaps *caps = NULL;
      g_object_get (camera->imagebin_capsfilter, "caps", &caps, NULL);
      gst_value_set_caps (value, caps);
      gst_caps_unref (caps);
    }
      break;
    case PROP_VIDEO_CAPTURE_CAPS:{
      GstCaps *caps = NULL;
      g_object_get (camera->videobin_capsfilter, "caps", &caps, NULL);
      gst_value_set_caps (value, caps);
      gst_caps_unref (caps);
    }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static GstElement *
setup_multiqueue (GstElement * pipe, GstElement * inputs[],
    GstElement * outputs[], guint num)
{
  GstElement *mq;
  guint i;

  mq = gst_element_factory_make ("multiqueue", NULL);
  fail_unless (mq != NULL, "failed to create 'multiqueue' element");

  gst_bin_add (GST_BIN (pipe), mq);

  for (i = 0; i < num; ++i) {
    GstPad *sinkpad = NULL;
    GstPad *srcpad = NULL;

    /* create multiqueue sink (and source) pad */
    sinkpad = gst_element_get_request_pad (mq, "sink_%u");
    fail_unless (sinkpad != NULL,
        "failed to create multiqueue request pad #%u", i);

    /* link input element N to the N-th multiqueue sink pad we just created */
    if (inputs != NULL && inputs[i] != NULL) {
      gst_bin_add (GST_BIN (pipe), inputs[i]);

      srcpad = gst_element_get_static_pad (inputs[i], "src");
      fail_unless (srcpad != NULL, "failed to find src pad for input #%u", i);

      fail_unless_equals_int (GST_PAD_LINK_OK, gst_pad_link (srcpad, sinkpad));

      gst_object_unref (srcpad);
      srcpad = NULL;
    }
    gst_object_unref (sinkpad);
    sinkpad = NULL;

    /* link output element N to the N-th multiqueue src pad */
    if (outputs != NULL && outputs[i] != NULL) {
      gchar padname[10];

      /* only the sink pads are request pads; the source pads have 'sometimes'
       * presence, so requesting one should return NULL */
      srcpad = gst_element_get_request_pad (mq, "src_%u");
      fail_unless (srcpad == NULL);

      g_snprintf (padname, sizeof (padname), "src_%u", i);
      srcpad = gst_element_get_static_pad (mq, padname);
      fail_unless (srcpad != NULL, "failed to get multiqueue src pad #%u", i);
      fail_unless (GST_PAD_IS_SRC (srcpad),
          "%s:%s is not a source pad?!", GST_DEBUG_PAD_NAME (srcpad));

      gst_bin_add (GST_BIN (pipe), outputs[i]);

      sinkpad = gst_element_get_static_pad (outputs[i], "sink");
      fail_unless (sinkpad != NULL, "failed to find sink pad of output #%u", i);
      fail_unless (GST_PAD_IS_SINK (sinkpad));

      fail_unless_equals_int (GST_PAD_LINK_OK, gst_pad_link (srcpad, sinkpad));

      gst_object_unref (srcpad);
      gst_object_unref (sinkpad);
    }
  }

  return mq;
}
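/* A hedged usage sketch for the helper above; the fakesrc/fakesink choices
 * are illustrative. Input N ends up linked to multiqueue sink_N, output N to
 * the matching src_N pad: */
static GstElement *
setup_multiqueue_usage_sketch (GstElement * pipe)
{
  GstElement *inputs[2], *outputs[2];

  inputs[0] = gst_element_factory_make ("fakesrc", NULL);
  inputs[1] = gst_element_factory_make ("fakesrc", NULL);
  outputs[0] = gst_element_factory_make ("fakesink", NULL);
  outputs[1] = gst_element_factory_make ("fakesink", NULL);

  return setup_multiqueue (pipe, inputs, outputs, 2);
}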
static void
test_interleave_2ch_pipeline (gboolean interleaved)
{
  GstElement *pipeline, *queue, *src1, *src2, *interleave, *sink;
  GstPad *sinkpad0, *sinkpad1, *tmp, *tmp2;
  GstMessage *msg;
  void *src_handoff_float32 =
      interleaved ? &src_handoff_float32_interleaved :
      &src_handoff_float32_non_interleaved;

  have_data = 0;

  pipeline = (GstElement *) gst_pipeline_new ("pipeline");
  fail_unless (pipeline != NULL);

  src1 = gst_element_factory_make ("fakesrc", "src1");
  fail_unless (src1 != NULL);
  g_object_set (src1, "num-buffers", 4, NULL);
  g_object_set (src1, "signal-handoffs", TRUE, NULL);
  g_signal_connect (src1, "handoff", G_CALLBACK (src_handoff_float32),
      GINT_TO_POINTER (0));
  gst_bin_add (GST_BIN (pipeline), src1);

  src2 = gst_element_factory_make ("fakesrc", "src2");
  fail_unless (src2 != NULL);
  g_object_set (src2, "num-buffers", 4, NULL);
  g_object_set (src2, "signal-handoffs", TRUE, NULL);
  g_signal_connect (src2, "handoff", G_CALLBACK (src_handoff_float32),
      GINT_TO_POINTER (1));
  gst_bin_add (GST_BIN (pipeline), src2);

  queue = gst_element_factory_make ("queue", "queue");
  fail_unless (queue != NULL);
  gst_bin_add (GST_BIN (pipeline), queue);

  interleave = gst_element_factory_make ("interleave", "interleave");
  fail_unless (interleave != NULL);
  gst_bin_add (GST_BIN (pipeline), gst_object_ref (interleave));

  sinkpad0 = gst_element_get_request_pad (interleave, "sink_%u");
  fail_unless (sinkpad0 != NULL);
  tmp = gst_element_get_static_pad (src1, "src");
  fail_unless (gst_pad_link (tmp, sinkpad0) == GST_PAD_LINK_OK);
  gst_object_unref (tmp);

  sinkpad1 = gst_element_get_request_pad (interleave, "sink_%u");
  fail_unless (sinkpad1 != NULL);
  tmp = gst_element_get_static_pad (src2, "src");
  tmp2 = gst_element_get_static_pad (queue, "sink");
  fail_unless (gst_pad_link (tmp, tmp2) == GST_PAD_LINK_OK);
  gst_object_unref (tmp);
  gst_object_unref (tmp2);
  tmp = gst_element_get_static_pad (queue, "src");
  fail_unless (gst_pad_link (tmp, sinkpad1) == GST_PAD_LINK_OK);
  gst_object_unref (tmp);

  sink = gst_element_factory_make ("fakesink", "sink");
  fail_unless (sink != NULL);
  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (sink_handoff_float32),
      GINT_TO_POINTER (0));
  gst_bin_add (GST_BIN (pipeline), sink);
  tmp = gst_element_get_static_pad (interleave, "src");
  tmp2 = gst_element_get_static_pad (sink, "sink");
  fail_unless (gst_pad_link (tmp, tmp2) == GST_PAD_LINK_OK);
  gst_object_unref (tmp);
  gst_object_unref (tmp2);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  msg = gst_bus_poll (GST_ELEMENT_BUS (pipeline), GST_MESSAGE_EOS, -1);
  gst_message_unref (msg);

  fail_unless (have_data == 4);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_element_release_request_pad (interleave, sinkpad0);
  gst_object_unref (sinkpad0);
  gst_element_release_request_pad (interleave, sinkpad1);
  gst_object_unref (sinkpad1);
  gst_object_unref (interleave);
  gst_object_unref (pipeline);
}
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  MOZ_ASSERT(OnTaskQueue());
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  LOG(LogLevel::Debug, "content-type: %s %s",
      mDecoder->GetResource()->GetContentType().get(),
      mDecoder->GetResource()->GetContentURL().get());
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(LogLevel::Debug, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(LogLevel::Debug, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(LogLevel::Debug, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(LogLevel::Debug, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(LogLevel::Error, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(mLastParserDuration));
  } else {
    LOG(LogLevel::Debug, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      LOG(LogLevel::Debug, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(duration));
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);

  if (!n_video) {
    mInfo.mVideo = VideoInfo();
  }
  if (!n_audio) {
    mInfo.mAudio = AudioInfo();
  }
  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

bool
GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  gst_iterator_free(it);

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(LogLevel::Debug, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(LogLevel::Debug, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mAudioSinkBufferCount) {
      if (!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio
           * data or something else has happened (EOS, etc.). Return to the
           * state machine to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1, 0);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (aKeyFrameSkip && !isKeyframe) {
    mDecoder->NotifyDecodedFrames(0, 0, 1);
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* fall back to a one-frame duration derived from the frame rate */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(LogLevel::Debug, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fall back to
     * allocating a PlanarYCbCrImage-backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mResource.Tell(); // Estimate location in media.
  nsRefPtr<VideoData> video = VideoData::CreateFromImage(mInfo.mVideo,
                                                         mDecoder->GetImageContainer(),
                                                         offset, timestamp, duration,
                                                         static_cast<Image*>(image.get()),
                                                         isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
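
/* gst_segment_to_stream_time(), used by both decode paths above, converts a
 * buffer timestamp into stream time, i.e. the position relative to the start
 * of the stream. A minimal sketch of its behaviour for a simple TIME segment:
 */
#if 0
GstSegment segment;
gst_segment_init(&segment, GST_FORMAT_TIME);
segment.start = 2 * GST_SECOND;  /* segment begins 2s into the timeline */
guint64 stream_time =
    gst_segment_to_stream_time(&segment, GST_FORMAT_TIME, 3 * GST_SECOND);
/* stream_time == 1 * GST_SECOND: the buffer is 1s into the stream */
#endif
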
GstElement *
create_codec_bin_from_blueprint (const FsCodec *codec,
    CodecBlueprint *blueprint, const gchar *name, gboolean is_send,
    GError **error)
{
  GstElement *codec_bin = NULL;
  const gchar *direction_str = is_send ? "send" : "receive";
  GList *walk = NULL;
  GstElement *current_element = NULL;
  GstElement *previous_element = NULL;
  GList *pipeline_factory = NULL;

  if (is_send)
    pipeline_factory = blueprint->send_pipeline_factory;
  else
    pipeline_factory = blueprint->receive_pipeline_factory;

  if (!pipeline_factory)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_UNKNOWN_CODEC,
        "The %s codec %s does not have a pipeline,"
        " its probably a special codec",
        fs_media_type_to_string (codec->media_type),
        codec->encoding_name);
    return NULL;
  }

  GST_DEBUG ("creating %s codec bin for id %d, pipeline_factory %p",
    direction_str, codec->id, pipeline_factory);
  if (is_send)
    codec_bin = gst_bin_new (name);
  else
    codec_bin = fs_rtp_bin_error_downgrade_new (name);

  for (walk = g_list_first (pipeline_factory); walk; walk = g_list_next (walk))
  {
    if (g_list_next (g_list_first (walk->data)))
    {
      /* More than one factory matches this stage; ideally configuration would
         tell us which one is preferred, so use autoconvert and let it pick */
      current_element = gst_element_factory_make ("autoconvert", NULL);

      if (!current_element)
      {
        g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
          "Could not create autoconvert element");
        goto error;
      }

      g_object_set (current_element, "factories", walk->data, NULL);
    } else {
      current_element =
        gst_element_factory_create (
            GST_ELEMENT_FACTORY (g_list_first (walk->data)->data), NULL);
      if (!current_element)
      {
        g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
          "Could not create element for pt %d", codec->id);
        goto error;
      }
    }

    if (!gst_bin_add (GST_BIN (codec_bin), current_element))
    {
      g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not add new element to %s codec_bin for pt %d",
        direction_str, codec->id);
      goto error;
    }

    if (_g_object_has_property (G_OBJECT (current_element), "pt"))
      g_object_set (current_element, "pt", codec->id,
        NULL);

    /* Let's create the ghost pads on the codec bin */

    if (g_list_previous (walk) == NULL)
      /* if it's the first element of the codec bin */
      if (!_create_ghost_pad (current_element,
              is_send ? "src" : "sink", codec_bin, error))
        goto error;

    if (g_list_next (walk) == NULL)
      /* if it's the last element of the codec bin */
      if (!_create_ghost_pad (current_element,
              is_send ? "sink" : "src" , codec_bin, error))
        goto error;

    /* Let's link consecutive elements together; caps negotiation on the link
     * ensures that multi-codec encoders/decoders select the appropriate
     * codec. */
    if (previous_element)
    {
      GstPad *sinkpad;
      GstPad *srcpad;
      GstPadLinkReturn ret;

      if (is_send)
        sinkpad = gst_element_get_static_pad (previous_element, "sink");
      else
        sinkpad = gst_element_get_static_pad (current_element, "sink");

      if (!sinkpad)
      {
        g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
          "Could not get the sink pad one of the elements in the %s codec bin"
          " for pt %d", direction_str, codec->id);
        goto error;
      }


      if (is_send)
        srcpad = gst_element_get_static_pad (current_element, "src");
      else
        srcpad = gst_element_get_static_pad (previous_element, "src");

      if (!srcpad)
      {
        g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
          "Could not get the src pad one of the elements in the %s codec bin"
          " for pt %d", direction_str, codec->id);
        gst_object_unref (sinkpad);
        goto error;
      }

      ret = gst_pad_link (srcpad, sinkpad);

      gst_object_unref (srcpad);
      gst_object_unref (sinkpad);

      if (GST_PAD_LINK_FAILED (ret))
      {
        g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
          "Could not link element inside the %s codec bin for pt %d",
          direction_str, codec->id);
        goto error;
      }
    }

    previous_element = current_element;
  }

  return codec_bin;

 error:
  gst_object_unref (codec_bin);
  return NULL;
}
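
/* _create_ghost_pad(), called twice above, exposes a pad of an inner element
 * on the codec bin so the bin can be linked from outside. A sketch of what
 * such a helper typically does (the real implementation lives elsewhere;
 * names and error handling here are illustrative):
 */
#if 0
static gboolean
create_ghost_pad_sketch (GstElement *inner, const gchar *padname,
    GstElement *bin, GError **error)
{
  GstPad *pad = gst_element_get_static_pad (inner, padname);
  GstPad *ghost;
  gboolean ok;

  if (!pad)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not get the %s pad of the element", padname);
    return FALSE;
  }

  /* The ghost pad on the bin reuses the inner pad's name ("src"/"sink") */
  ghost = gst_ghost_pad_new (padname, pad);
  ok = (ghost != NULL) && gst_element_add_pad (bin, ghost);
  gst_object_unref (pad);
  return ok;
}
#endif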