コード例 #1
0
/* Returns TRUE if @src (expected to be a source pad) can be linked to
 * @dest (expected to be a sink pad), based on pad direction and on
 * whether their caps intersect. */
static gboolean
mx_gst_pad_is_compatible(MxGstGraphElementPad *src, 
    MxGstGraphElementPad *dest)
{
  GstCaps *src_caps;
  GstCaps *dest_caps;
  gboolean compatible;

  /* Check directions first so we don't take caps references we would
   * immediately have to drop. */
  if( (GST_PAD_SRC != gst_pad_get_direction(src->priv->pad)) ||
      (GST_PAD_SINK != gst_pad_get_direction(dest->priv->pad)) )
  {
    return FALSE;
  }

  /* gst_pad_get_caps() returns a reference we own and must release. */
  src_caps  = gst_pad_get_caps(src->priv->pad);
  dest_caps = gst_pad_get_caps(dest->priv->pad);

  if(gst_caps_is_any(src_caps) || gst_caps_is_any(dest_caps))
  {
    compatible = TRUE;
  }
  else if(gst_caps_is_empty(src_caps) || gst_caps_is_empty(dest_caps))
  {
    compatible = FALSE;
  }
  else
  {
    /* fix: gst_caps_intersect() returns a new caps object that the
     * original code leaked. */
    GstCaps *intersect = gst_caps_intersect(src_caps, dest_caps);
    compatible = !gst_caps_is_empty(intersect);
    gst_caps_unref(intersect);
  }

  /* fix: both caps references were leaked on every return path. */
  gst_caps_unref(src_caps);
  gst_caps_unref(dest_caps);

  return compatible;
}
コード例 #2
0
ファイル: eprocessat.cpp プロジェクト: acamara/videoview
/* decodebin "new-decoded-pad" callback: links the first video pad to the
 * sink pad of grup->v.bin, ignoring non-video pads and later pads. */
static void cb_newpad_video (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
  EntradaFitxer *grup = (EntradaFitxer*)data;
  GstCaps *caps;
  GstStructure *str;
  GstPad *videopad;

  /* Link only once */
  videopad = gst_element_get_static_pad (grup->v.bin, "sink");
  if (GST_PAD_IS_LINKED (videopad)) {
    /* fix: use gst_object_unref for a GstPad, consistent with the other
     * paths of this function (g_object_unref works but mixes idioms) */
    gst_object_unref (videopad);
    return;
  }

  /* Check the media type of the new pad */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  if (!g_strrstr (gst_structure_get_name (str), "video")) {
    gst_caps_unref (caps);
    gst_object_unref (videopad);
    return;
  }
  gst_caps_unref (caps);

  /* Link'n'play */
  gst_pad_link (pad, videopad);

  gst_object_unref (videopad);
}
コード例 #3
0
ファイル: dlna-encoding.c プロジェクト: luisbg/gupnp-dlna
/* uridecodebin "pad-added" callback: finds (or requests) a compatible sink
 * pad on @encodebin and links the new decoded pad to it. */
static void
pad_added_cb (GstElement * uridecodebin, GstPad * pad, GstElement * encodebin)
{
  GstPad *sinkpad;

  /* First try an already-existing compatible pad on encodebin. */
  sinkpad = gst_element_get_compatible_pad (encodebin, pad, NULL);

  if (sinkpad == NULL) {
    GstCaps *caps;

    /* Ask encodebin for a compatible pad */
    caps = gst_pad_get_caps (pad);
    g_signal_emit_by_name (encodebin, "request-pad", caps, &sinkpad);
    if (caps)
      gst_caps_unref (caps);
  }
  if (sinkpad == NULL) {
    g_print ("Couldn't get an encoding channel for pad %s:%s\n",
        GST_DEBUG_PAD_NAME (pad));
    return;
  }

  if (G_UNLIKELY (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)) {
    g_print ("Couldn't link pads\n");
  }

  /* fix: the sinkpad reference (from get_compatible_pad or request-pad)
   * was leaked on the success path. */
  gst_object_unref (sinkpad);

  return;
}
コード例 #4
0
ファイル: gstplayer.cpp プロジェクト: jbruggem/jingles-impro
// "pad-added" handler: links a newly exposed raw-audio pad on @upstream to
// the static "sink" pad of @downstream. Non-audio pads and already-linked
// sinks are logged and ignored.
void GstPlayer::handleAddedPad(GstElement * upstream, GstPad * upstreamNewPad, GstElement * downstream) {

  GstPad *downstreamPad = gst_element_get_static_pad ( downstream, "sink");
  GstPadLinkReturn result;
  GstCaps * newPadCaps = NULL;
  GstStructure * newPadStruct = NULL;
  const gchar * newPadType = NULL;

  QLOG_TRACE() << "Got pad " << GST_PAD_NAME (upstreamNewPad) << " from " << GST_ELEMENT_NAME (upstream);

  if (gst_pad_is_linked (downstreamPad)) {
      QLOG_TRACE() << " Pad already connected to downstream.";
  }else{
      newPadCaps = gst_pad_get_caps (upstreamNewPad);
      newPadStruct = gst_caps_get_structure (newPadCaps, 0);
      newPadType = gst_structure_get_name (newPadStruct);

      if (!g_str_has_prefix (newPadType, "audio/x-raw")) {
          // fix: the original message was garbled ("is not of type is not raw audio")
          QLOG_TRACE() << "Pad is not raw audio but of type "<< newPadType <<". Can't connect.";
      }else{
          result = gst_pad_link (upstreamNewPad, downstreamPad);
          if (GST_PAD_LINK_FAILED (result)) {
              QLOG_TRACE() << "Failed to link.";
          } else {
              QLOG_TRACE() << "Link successful.";
          }
      }
    }

  // newPadCaps is only set on the not-yet-linked branch
  if (newPadCaps != NULL)
    gst_caps_unref (newPadCaps);
  gst_object_unref (downstreamPad);
}
/* decodebin "new-decoded-pad" callback: links the first audio pad to the
 * sink pad of ma->audio; non-audio pads and later pads are ignored. */
static void
Lastfmfp_cb_newpad(GstElement *decodebin, GstPad *pad, gboolean last, LastfmfpAudio *ma)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;

    // only link once
    audiopad = gst_element_get_pad(ma->audio, "sink");
    if (GST_PAD_IS_LINKED(audiopad)) {
        // fix: use gst_object_unref for a GstPad, consistent with the
        // other paths of this function
        gst_object_unref(audiopad);
        return;
    }

    // check media type
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);

    if (!g_strrstr(gst_structure_get_name(str), "audio")) {
        gst_caps_unref(caps);
        gst_object_unref(audiopad);
        return;
    }
    gst_caps_unref(caps);

    // link
    gst_pad_link(pad, audiopad);
    gst_object_unref(audiopad);
}
コード例 #6
0
/* Build a GstStructure named "stream-NN" describing stream @idx of @ps:
 * its caps (negotiated if available, otherwise the pad's possible caps)
 * and its tags, if any. Caller owns the returned structure. */
static GstStructure *
collect_stream_information (GstDiscoverer * dc, PrivateStream * ps, guint idx)
{
  GstStructure *info;
  GstCaps *stream_caps;
  gchar *struct_name;

  struct_name = g_strdup_printf ("stream-%02d", idx);
  info = gst_structure_empty_new (struct_name);
  g_free (struct_name);

  /* Prefer the negotiated caps; fall back to the pad's possible caps. */
  stream_caps = gst_pad_get_negotiated_caps (ps->pad);
  if (stream_caps == NULL) {
    GST_WARNING ("Couldn't get negotiated caps from %s:%s",
        GST_DEBUG_PAD_NAME (ps->pad));
    stream_caps = gst_pad_get_caps (ps->pad);
  }

  if (stream_caps != NULL) {
    GST_DEBUG ("Got caps %" GST_PTR_FORMAT, stream_caps);
    gst_structure_id_set (info, _CAPS_QUARK, GST_TYPE_CAPS, stream_caps, NULL);
    gst_caps_unref (stream_caps);
  }

  if (ps->tags != NULL)
    gst_structure_id_set (info, _TAGS_QUARK, GST_TYPE_STRUCTURE, ps->tags, NULL);

  return info;
}
コード例 #7
0
/* Demuxer "new pad" callback: if the pad carries video, link it to the
 * sink pad of the decoder passed via @user_data. */
static void
new_decoded_pad_cb(GstElement *demuxer,
                   GstPad *new_pad,
                   gpointer user_data)
{
   GstElement *decoder;
   GstPad *pad;
   GstCaps *caps;
   gchar *str;

   caps = gst_pad_get_caps(new_pad);
   str = gst_caps_to_string(caps);

   if (g_str_has_prefix(str, "video/"))
     {
        decoder = GST_ELEMENT(user_data);

        pad = gst_element_get_pad(decoder, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(new_pad, pad)))
          {
             g_warning("Failed to link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(new_pad),
                       GST_DEBUG_PAD_NAME(pad));
          }
        /* fix: the sink pad reference from gst_element_get_pad() was leaked */
        gst_object_unref(pad);
     }
   g_free(str);
   gst_caps_unref(caps);
}
コード例 #8
0
ファイル: cvcap_gstreamer.cpp プロジェクト: alejotima/opencv
//
// connect decodebin's dynamically created source pads to colourconverter
//
//
// connect decodebin's dynamically created source pads to colourconverter
//
static void icvNewPad(GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
	GstElement *sink = GST_ELEMENT(data);

	/* link only once */
	GstPad *sinkpad = gst_element_get_pad(sink, "sink");
	if(GST_PAD_IS_LINKED(sinkpad)) {
		g_print("sink is already linked\n");
		g_object_unref(sinkpad);
		return;
	}

	/* check media type */
	GstCaps *caps = gst_pad_get_caps(pad);
	GstStructure *str = gst_caps_get_structure(caps, 0);
	const char *structname = gst_structure_get_name(str);

	if(g_strrstr(structname, "video") == NULL) {
		/* not a video pad: drop our references and bail out */
		gst_caps_unref(caps);
		gst_object_unref(sinkpad);
		return;
	}
	printf("linking pad %s\n", structname);

	/* link'n'play */
	gst_pad_link (pad, sinkpad);

	gst_caps_unref(caps);
	gst_object_unref(sinkpad);
}
コード例 #9
0
ファイル: decodebin.c プロジェクト: kuailexs/symbiandump-mw1
/* decodebin "new-decoded-pad" callback: links the first audio pad to the
 * sink pad of the global `audio` element; other pads are ignored. */
static void
cb_newpad (GstElement *decodebin,
	   GstPad     *pad,
	   gboolean    last,
	   gpointer    data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *audiopad;

  /* only link once */
  audiopad = gst_element_get_pad (audio, "sink");
  if (GST_PAD_IS_LINKED (audiopad)) {
    g_object_unref (audiopad);
    return;
  }

  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  if (!g_strrstr (gst_structure_get_name (str), "audio")) {
    gst_caps_unref (caps);
    gst_object_unref (audiopad);
    return;
  }
  gst_caps_unref (caps);

  /* link'n'play */
  gst_pad_link (pad, audiopad);
  /* fix: the audiopad reference was leaked on the successful-link path */
  gst_object_unref (audiopad);
}
コード例 #10
0
ファイル: banshee-transcoder.c プロジェクト: Rizean/banshee
/* decodebin "new-decoded-pad" callback: links the first audio pad to the
 * transcoder's sink bin; non-audio pads and later pads are ignored. */
static void
gst_transcoder_new_decoded_pad(GstElement *decodebin, GstPad *pad, 
    gboolean last, gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;
    GstTranscoder *transcoder = (GstTranscoder *)data;

    g_return_if_fail(transcoder != NULL);

    audiopad = gst_element_get_pad(transcoder->sink_bin, "sink");
    
    /* only link once */
    if(GST_PAD_IS_LINKED(audiopad)) {
        g_object_unref(audiopad);
        return;
    }

    /* check media type */
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);
    
    if(!g_strrstr(gst_structure_get_name(str), "audio")) {
        gst_caps_unref(caps);
        gst_object_unref(audiopad);
        return;
    }
   
    gst_caps_unref(caps);
    gst_pad_link(pad, audiopad);
    /* fix: the audiopad reference was leaked on the successful-link path */
    gst_object_unref(audiopad);
}
コード例 #11
0
// decodebin "new-decoded-pad" callback: links the first video pad to the
// glimagesink's sink pad; non-video pads and later pads are ignored.
void Pipeline::cb_new_pad (GstElement* decodebin, GstPad* pad, gboolean last, GstElement* glimagesink)
{
    GstPad* glpad = gst_element_get_pad (glimagesink, "sink");
    
    //only link once 
    if (GST_PAD_IS_LINKED (glpad)) 
    {
        gst_object_unref (glpad);
        return;
    }

    GstCaps* caps = gst_pad_get_caps (pad);
    GstStructure* str = gst_caps_get_structure (caps, 0);
    if (!g_strrstr (gst_structure_get_name (str), "video")) 
    {
        gst_caps_unref (caps);
        gst_object_unref (glpad);
        return;
    }
    gst_caps_unref (caps);

    GstPadLinkReturn ret = gst_pad_link (pad, glpad);
    // fix: the glpad reference was leaked on the link path
    gst_object_unref (glpad);
    if (ret != GST_PAD_LINK_OK) 
        g_warning ("Failed to link with decodebin!\n");
}
コード例 #12
0
ファイル: transcoder.cpp プロジェクト: ipse666/cueplayer
/* decodebin "new-decoded-pad" callback: links the first audio pad to the
 * sink pad of the global `audio` element; other pads are ignored. */
static void
cb_newpad (GstElement *decodebin,
           GstPad     *pad,
           gboolean    last,
           gpointer    data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;

    /* only link once */
    audiopad = gst_element_get_static_pad (audio, "sink");
    if (GST_PAD_IS_LINKED (audiopad)) {
        /* fix: dropped the pointless decodebin/last/data assignments --
         * writing to by-value parameters right before returning has no
         * observable effect */
        g_object_unref (audiopad);
        return;
    }

    /* check media type */
    caps = gst_pad_get_caps (pad);
    str = gst_caps_get_structure (caps, 0);
    if (!g_strrstr (gst_structure_get_name (str), "audio")) {
        gst_caps_unref (caps);
        gst_object_unref (audiopad);
        return;
    }
    gst_caps_unref (caps);

    gst_pad_link (pad, audiopad);
    /* fix: the audiopad reference was leaked on the successful-link path */
    gst_object_unref (audiopad);
}
コード例 #13
0
/* GstBaseSink::set_caps: accept @caps if they intersect the pad's allowed
 * caps and carry integer width/height; stores the dimensions and caps on
 * the sink. */
static gboolean
gst_wayland_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstWaylandSink *sink = GST_WAYLAND_SINK (bsink);
  const GstStructure *structure;
  GstCaps *allowed_caps;
  gboolean ret = TRUE;

  GST_LOG_OBJECT (sink, "set caps %" GST_PTR_FORMAT, caps);

  /* gst_pad_get_caps() returns a reference we own. */
  allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (bsink));

  if (!gst_caps_can_intersect (allowed_caps, caps)) {
    /* fix: allowed_caps was leaked on the rejection path */
    gst_caps_unref (allowed_caps);
    return FALSE;
  }
  /* fix: allowed_caps was leaked on the acceptance path too */
  gst_caps_unref (allowed_caps);

  structure = gst_caps_get_structure (caps, 0);

  ret &= gst_structure_get_int (structure, "width", &sink->video_width);
  ret &= gst_structure_get_int (structure, "height", &sink->video_height);

  if (!ret)
    return FALSE;

  gst_caps_replace (&sink->caps, caps);

  return TRUE;
}
コード例 #14
0
ファイル: filtercaps.c プロジェクト: WangCrystal/gstreamer
/* Demo: shows that link-filtering restricts the caps reported by a pad,
 * and that get_allowed_caps returns NULL on an unconnected pad. */
gint
main (gint argc, gchar ** argv)
{
  GstCaps *caps;
  GstElement *sink, *identity;
  GstElement *pipeline;
  GstPad *pad;
  gchar *caps_str;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  identity = gst_element_factory_make ("identity", NULL);
  g_assert (identity);
  sink = gst_element_factory_make ("fakesink", NULL);
  g_assert (sink);
  gst_bin_add_many (GST_BIN (pipeline), identity, sink, NULL);
  gst_element_link_filtered (identity, sink,
      gst_caps_new_simple ("audio/x-raw-int", NULL));

  /* fix: fetch the pad once and release it; the original leaked two pad
   * references, two caps_to_string results and one caps reference */
  pad = gst_element_get_pad (identity, "sink");

  caps = gst_pad_get_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("caps:         %s\n", caps_str);
  g_free (caps_str);
  g_assert (!gst_caps_is_any (caps));
  gst_caps_unref (caps);

  caps = gst_pad_get_allowed_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("allowed caps: %s\n", caps_str);
  g_free (caps_str);
  /* get_allowed_caps doesn't mean anything if you aren't connected */
  g_assert (!caps);

  gst_object_unref (pad);

  return 0;
}
コード例 #15
0
/* decodebin2 "new-decoded-pad" callback: on the first raw video pad,
 * builds and links a csp ! videoscale ! capsfilter ! gdkpixbufsink branch
 * and brings it to PAUSED. Errors out if the last pad is not video. */
static void
new_decoded_pad (GstElement * dec, GstPad * new_pad, gboolean last,
    AppInfo * info)
{
  const gchar *sname;
  GstElement *csp, *scale, *filter;
  GstStructure *s;
  GstCaps *caps;
  GstPad *sinkpad;
  gboolean is_video;

  /* already found a video stream? */
  if (info->got_video)
    return;

  /* FIXME: is this racy or does decodebin2 make sure caps are always
   * negotiated at this point? */
  caps = gst_pad_get_caps (new_pad);
  g_return_if_fail (caps != NULL);

  s = gst_caps_get_structure (caps, 0);
  sname = gst_structure_get_name (s);
  is_video = g_str_has_prefix (sname, "video/x-raw-");
  /* fix: caps were never unreffed; sname points into the caps' structure,
   * so only drop the reference after the prefix check */
  gst_caps_unref (caps);
  if (!is_video)
    goto not_video;

  csp = create_element ("ffmpegcolorspace");
  scale = create_element ("videoscale");
  filter = create_element ("capsfilter");
  info->sink = create_element ("gdkpixbufsink");
  g_object_set (info->sink, "qos", FALSE, "max-lateness", (gint64) - 1, NULL);

  gst_bin_add_many (GST_BIN (info->pipe), csp, scale, filter, info->sink, NULL);

  sinkpad = gst_element_get_static_pad (csp, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, sinkpad)))
    g_error ("Can't link new decoded pad to ffmpegcolorspace's sink pad");
  gst_object_unref (sinkpad);

  if (!gst_element_link (csp, scale))
    g_error ("Can't link ffmpegcolorspace to videoscale");
  if (!gst_element_link (scale, filter))
    g_error ("Can't link videoscale to capsfilter");
  if (!gst_element_link (filter, info->sink))
    g_error ("Can't link capsfilter to gdkpixbufsink");

  /* bring the new branch up to match the rest of the pipeline */
  gst_element_set_state (info->sink, GST_STATE_PAUSED);
  gst_element_set_state (filter, GST_STATE_PAUSED);
  gst_element_set_state (scale, GST_STATE_PAUSED);
  gst_element_set_state (csp, GST_STATE_PAUSED);

  info->got_video = TRUE;
  return;

not_video:
  {
    if (last) {
      g_error ("This file does not contain a video track, or you do not have "
          "the necessary decoder(s) installed");
    }
  }
}
コード例 #16
0
/* decodebin "new-decoded-pad" callback: links the first audio pad to the
 * detector's audioconvert sink pad; other pads are ignored. */
static void
bbd_new_decoded_pad(GstElement *decodebin, GstPad *pad, 
    gboolean last, gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;
    BansheeBpmDetector *detector = (BansheeBpmDetector *)data;

    g_return_if_fail(detector != NULL);

    audiopad = gst_element_get_pad(detector->audioconvert, "sink");
    
    /* only link once */
    if(GST_PAD_IS_LINKED(audiopad)) {
        g_object_unref(audiopad);
        return;
    }

    /* check media type */
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);
    
    if(!g_strrstr(gst_structure_get_name(str), "audio")) {
        gst_caps_unref(caps);
        gst_object_unref(audiopad);
        return;
    }
   
    gst_caps_unref(caps);
    gst_pad_link(pad, audiopad);
    /* fix: the audiopad reference was leaked on the successful-link path */
    gst_object_unref(audiopad);
}
コード例 #17
0
ファイル: rtp_server.c プロジェクト: passoir/rtsp_rtp_server
/* "pad-added" handler: routes a new demuxer pad to the sink pad of the
 * global video or audio decoder, depending on its media type. */
void on_pad_added(GstElement * element, GstPad * pad)
{
    GstCaps * caps;
    GstStructure * str;
    GstPad * targetsink = NULL;
    const gchar * media;

    caps = gst_pad_get_caps(pad);
    g_assert(caps != NULL);
    str = gst_caps_get_structure(caps, 0);
    g_assert(str != NULL);

    media = gst_structure_get_name(str);

    /* pick the decoder sink matching this pad's media type */
    if (g_strrstr(media, "video"))
        targetsink = gst_element_get_pad(videodec, "sink");
    else if (g_strrstr(media, "audio"))
        targetsink = gst_element_get_pad(audiodec, "sink");

    if (targetsink != 0) {
        gst_pad_link(pad, targetsink);
        gst_object_unref(targetsink);
    }
    gst_caps_unref(caps);
}
コード例 #18
0
/* decodebin "new-decoded-pad" callback: links the new pad to the sink pad
 * of the global `audio` or `video` element depending on its media type,
 * and bumps the global `mutex` counter for each pad seen. */
static void
cb_newpad(GstElement *decodebin,
        GstPad *pad,
        gboolean last,
        gpointer data) {
  GstPad *sinkpad;
  GstCaps *caps;
  GstStructure *str;
  const gchar *tex;  /* fix: gst_structure_get_name() returns a const string */

  caps = gst_pad_get_caps(pad);
  str = gst_caps_get_structure(caps, 0);
  tex = gst_structure_get_name(str);
  mutex++;
  g_print("TEX:%s \nSTR:%s",tex,gst_structure_get_name (str));
  if (g_strrstr(gst_structure_get_name(str), "audio"))
  {
    sinkpad = gst_element_get_static_pad(audio, "sink");
    gst_pad_link(pad, sinkpad);
    gst_object_unref (sinkpad);
    /* fix: removed the unused local `gint curr_vol` */
  }
  else if (g_strrstr(gst_structure_get_name(str), "video"))
  {
    sinkpad = gst_element_get_static_pad(video, "sink");
    gst_pad_link(pad, sinkpad);
    gst_object_unref (sinkpad);
  }
  /* fix: caps were leaked on every path */
  gst_caps_unref(caps);
}
コード例 #19
0
/* Getcaps function for the element's pad: returns the sink peer's caps
 * transformed through the crop logic, or the src template caps when the
 * sink pad has no peer. Caller owns the returned caps reference. */
static GstCaps *
gst_aspect_ratio_crop_get_caps (GstPad * pad)
{
  GstPad *peer;
  GstAspectRatioCrop *aspect_ratio_crop;
  GstCaps *return_caps;

  /* gst_pad_get_parent() takes a ref on the element; released below */
  aspect_ratio_crop = GST_ASPECT_RATIO_CROP (gst_pad_get_parent (pad));

  /* serialize against concurrent crop reconfiguration */
  g_mutex_lock (aspect_ratio_crop->crop_lock);

  peer = gst_pad_get_peer (aspect_ratio_crop->sink);
  if (peer == NULL) {
    return_caps = gst_static_pad_template_get_caps (&src_template);
    /* extra ref so the caller owns a reference on this branch too */
    gst_caps_ref (return_caps);
  } else {
    GstCaps *peer_caps;

    /* transform the peer's caps; the peer_caps ref and the peer pad ref
     * are both dropped once the transformed copy has been produced */
    peer_caps = gst_pad_get_caps (peer);
    return_caps =
        gst_aspect_ratio_crop_transform_caps (aspect_ratio_crop, peer_caps);
    gst_caps_unref (peer_caps);
    gst_object_unref (peer);
  }

  g_mutex_unlock (aspect_ratio_crop->crop_lock);
  gst_object_unref (aspect_ratio_crop);

  return return_caps;
}
コード例 #20
0
ファイル: TapeComposition.cpp プロジェクト: rainChu/ytp-king
// "pad-added" handler: links @new_pad to a compatible pad on @sink, if any.
void
TapeComposition::onPadAdded( GstElement *src, GstPad *new_pad, GstElement *sink )
{
	// fix: the caps from gst_pad_get_caps() were passed inline and leaked
	GstCaps *caps = gst_pad_get_caps( new_pad );
	GstPad *compatiblePad = gst_element_get_compatible_pad( sink, new_pad, caps );
	gst_caps_unref( caps );

	if ( compatiblePad )
	{
		gst_pad_link( new_pad, compatiblePad );
		// fix: the compatible-pad reference was leaked after linking
		gst_object_unref( compatiblePad );
	}
}
コード例 #21
0
/* Probes @webcam_device by running "<src> name=source device=<dev> ! fakesink"
 * to PLAYING, then reads the source pad's caps to discover the supported
 * video formats. All pipeline state is torn down before returning. */
static void
get_device_data (ofGstDevice &webcam_device)
{
    char                *pipeline_desc;
    GstElement          *pipeline;
    GError              *err;
    GstStateChangeReturn ret;
    GstMessage          *msg;
    GstBus              *bus;

    {
        pipeline_desc = g_strdup_printf ("%s name=source device=%s ! fakesink",
                                         webcam_device.gstreamer_src,
                                         webcam_device.video_device);
        err      = NULL;
        pipeline = gst_parse_launch (pipeline_desc, &err);
        if ((pipeline != NULL) && (err == NULL))
        {
            /* Start the pipeline and wait for max. 10 seconds for it to start up */
            gst_element_set_state (pipeline, GST_STATE_PLAYING);
            ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

            /* Check if any error messages were posted on the bus */
            bus = gst_element_get_bus (pipeline);
            msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
            gst_object_unref (bus);

            if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS))
            {
                GstElement *src;
                GstPad     *pad;
                char       *name;
                GstCaps    *caps;

                gst_element_set_state (pipeline, GST_STATE_PAUSED);

                src = gst_bin_get_by_name (GST_BIN (pipeline), "source");

                /* g_object_get() duplicates the string: it must be freed.
                 * fix: the original leaked it, and assigned a string
                 * literal on the NULL path, which would have crashed any
                 * future g_free(). */
                g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);

//        ofLog(OF_LOG_VERBOSE,"Device: %s (%s)\n", name ? name : "Unknown", webcam_device.video_device);
                g_free (name);

                pad  = gst_element_get_pad (src, "src");
                caps = gst_pad_get_caps (pad);
                gst_object_unref (pad);
                get_supported_video_formats (webcam_device, *caps);
                gst_caps_unref (caps);
                /* fix: gst_bin_get_by_name() returns a reference; was leaked */
                gst_object_unref (src);
            }
            else if (msg != NULL)
            {
                /* fix: the polled error message was leaked */
                gst_message_unref (msg);
            }
            gst_element_set_state (pipeline, GST_STATE_NULL);
            gst_object_unref (pipeline);
        }
        if (err)
            g_error_free (err);

        g_free (pipeline_desc);
    }
}
コード例 #22
0
ファイル: ofGstUtils.cpp プロジェクト: 6301158/SmileFile
/* Probes @webcam_device by running "<src> name=source device=<dev> ! fakesink"
 * to PLAYING, then reads the source pad's caps to discover the supported
 * video formats at @desired_framerate. Logs and bails out on any error. */
static void get_device_data (ofGstDevice &webcam_device, int desired_framerate)
{
    string pipeline_desc = webcam_device.gstreamer_src + " name=source device=" +
            webcam_device.video_device + " ! fakesink";

    GError * err = NULL;
    GstElement * pipeline = gst_parse_launch (pipeline_desc.c_str(), &err);
    if ((pipeline == NULL) || (err != NULL)){
    	if (err){
    		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data: %s", err->message);
    		g_error_free (err);
    	}else{
    		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data, cannot get pipeline");
    	}
    	if(pipeline)
    		gst_object_unref (pipeline);
    	return;
    }

	// TODO: try to lower seconds,
    // Start the pipeline and wait for max. 10 seconds for it to start up
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	GstStateChangeReturn ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

	// Check if any error messages were posted on the bus
	GstBus * bus = gst_element_get_bus (pipeline);
	GstMessage * msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
	gst_object_unref (bus);

	if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS)){
		gst_element_set_state (pipeline, GST_STATE_PAUSED);

		GstElement *src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
		char       *name;
		g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);

		ofLog(OF_LOG_VERBOSE, "Device: %s (%s)\n", name==NULL?"":name, webcam_device.video_device.c_str());
		g_free (name);           // fix: g_object_get() dups the string; it was leaked

		GstPad     *pad  = gst_element_get_pad (src, "src");
		GstCaps    *caps = gst_pad_get_caps (pad);
		gst_object_unref (pad);

		get_supported_video_formats (webcam_device, *caps, desired_framerate);

		gst_caps_unref (caps);
		gst_object_unref (src);  // fix: gst_bin_get_by_name() returns a ref; was leaked
	}else if(msg){
		gchar *debug;
		gst_message_parse_error(msg, &err, &debug);

		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data; module %s reported: %s",
			  gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

		g_error_free(err);
		g_free(debug);
		gst_message_unref(msg);  // fix: the polled error message was leaked
	}
	gst_element_set_state (pipeline, GST_STATE_NULL);
	gst_object_unref (pipeline);

}
コード例 #23
0
ファイル: gstdc1394.c プロジェクト: asrashley/gst-plugins-bad
/* GstPushSrc::create: dequeues one frame from the dc1394 camera, copies it
 * into a new timestamped GstBuffer and returns it via @buffer. */
static GstFlowReturn
gst_dc1394_create (GstPushSrc * psrc, GstBuffer ** buffer)
{
    GstDc1394 *src;
    GstBuffer *outbuf;
    GstCaps *caps;
    dc1394video_frame_t *frame[1];
    GstFlowReturn res = GST_FLOW_OK;
    dc1394error_t err;

    src = GST_DC1394 (psrc);

    /* block until the driver hands us a captured frame */
    err = dc1394_capture_dequeue (src->camera, DC1394_CAPTURE_POLICY_WAIT, frame);

    if (err != DC1394_SUCCESS) {
        GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
                           ("failed to dequeue frame"), ("failed to dequeue frame"));
        goto error;
    }

    outbuf = gst_buffer_new_and_alloc (frame[0]->image_bytes);

    memcpy (GST_BUFFER_MALLOCDATA (outbuf), (guchar *) frame[0]->image,
            frame[0]->image_bytes * sizeof (guchar));

    GST_BUFFER_DATA (outbuf) = GST_BUFFER_MALLOCDATA (outbuf);

    caps = gst_pad_get_caps (GST_BASE_SRC_PAD (psrc));
    gst_buffer_set_caps (outbuf, caps);
    gst_caps_unref (caps);

    GST_BUFFER_TIMESTAMP (outbuf) = src->timestamp_offset + src->running_time;
    if (src->rate_numerator != 0) {
        GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
                                       src->rate_denominator, src->rate_numerator);
    }

    src->n_frames++;
    if (src->rate_numerator != 0) {
        src->running_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
                            src->rate_denominator, src->rate_numerator);
    }

    if (dc1394_capture_enqueue (src->camera, frame[0]) != DC1394_SUCCESS) {
        GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("failed to enqueue frame"),
                           ("failed to enqueue frame"));
        /* fix: outbuf was leaked when re-enqueueing the driver frame failed */
        gst_buffer_unref (outbuf);
        goto error;
    }

    *buffer = outbuf;

    return res;

error:
    {
        return GST_FLOW_ERROR;
    }
}
コード例 #24
0
ファイル: osxvideosrc.c プロジェクト: ChinnaSuhas/ossbuild
/* GstPushSrc::create: polls the QuickTime sequence grabber via SGIdle until
 * a frame lands in self->buffer, then hands it to the caller with the src
 * pad's caps attached. */
static GstFlowReturn
gst_osx_video_src_create (GstPushSrc * src, GstBuffer ** buf)
{
  GstOSXVideoSrc *self = GST_OSX_VIDEO_SRC (src);
  ComponentResult err;
  GstCaps *caps;

  // ###: we need to sleep between calls to SGIdle.  originally, the sleeping
  //   was done using gst_clock_id_wait(), but that approach occasionally
  //   blocked far longer than requested (freezing video) and used far more
  //   cpu than a simple usleep(), so we poll with usleep() instead.
  do {
    err = SGIdle (self->seq_grab);
    if (err != noErr) {
      GST_ERROR_OBJECT (self, "SGIdle returned %d", (int) err);
      /* fix: removed gst_object_unref (clock) -- `clock` was commented out
       * along with the gst_clock code, so this call referenced a variable
       * that no longer exists and could not compile */
      return GST_FLOW_UNEXPECTED;
    }

    if (self->buffer == NULL) {
      /* wait half a frame period before polling the grabber again */
      usleep (1000000 / (FRAMERATE * 2));
    }
  } while (self->buffer == NULL);

  /* hand ownership of the captured buffer to the caller */
  *buf = self->buffer;
  self->buffer = NULL;

  caps = gst_pad_get_caps (GST_BASE_SRC_PAD (src));
  gst_buffer_set_caps (*buf, caps);
  gst_caps_unref (caps);

  return GST_FLOW_OK;
}
コード例 #25
0
/* This function will be called by the pad-added signal: links a new raw
 * video pad to data->vsink and a new raw audio pad to data->convert;
 * other pad types are ignored. */
static void pad_added_handler(GstElement *src, GstPad *new_pad,
                              CustomData *data)
{
    GstPad *sink_pad = NULL;
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad),
            GST_ELEMENT_NAME(src));

    /* Check the new pad's type */
    new_pad_caps = gst_pad_get_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-raw")) {
        sink_pad = gst_element_get_static_pad(data->vsink, "sink");
        if (gst_pad_is_linked(sink_pad)) {
            g_print("  We are already linked. Ignoring.\n");
            goto exit;
        }
    }

    if (g_str_has_prefix(new_pad_type, "audio/x-raw")) {
        sink_pad = gst_element_get_static_pad(data->convert, "sink");

        if (gst_pad_is_linked(sink_pad)) {
            g_print("  We are already linked. Ignoring.\n");
            goto exit;
        }
    }

    /* fix: when the pad is neither raw video nor raw audio, sink_pad stays
     * NULL and the original code called gst_pad_link(new_pad, NULL) */
    if (sink_pad == NULL) {
        g_print("  Pad type '%s' is not handled. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link(new_pad, sink_pad);

    if (GST_PAD_LINK_FAILED(ret)) {
        g_print("  Type is '%s' but link failed.\n", new_pad_type);
    }
    else {
        g_print("  Link succeeded (type '%s').\n", new_pad_type);
    }

exit:

    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    /* Unreference the sink pad */
    if (sink_pad != NULL)
        gst_object_unref(sink_pad);
}
コード例 #26
0
ファイル: video_source.cpp プロジェクト: alg-a/scenic
// Builds the caps string for the v4l2 source at the configured resolution,
// picking the framerate at @framerateIndex from the device's advertised
// framerates. Cycles the source READY -> NULL around the probe.
std::string VideoV4lSource::srcCaps(unsigned int framerateIndex) const
{
    std::ostringstream capsStr;
    // caps are only queryable once the device is opened (READY state)
    GstStateChangeReturn ret = gst_element_set_state(source_, GST_STATE_READY);
    if (ret not_eq GST_STATE_CHANGE_SUCCESS)
        THROW_ERROR("Could not change v4l2src state to READY");
    GstPad *srcPad = gst_element_get_static_pad(source_, "src");
    GstCaps *caps = gst_pad_get_caps(srcPad);
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    const GValue *val = gst_structure_get_value(structure, "framerate");
    // NOTE(review): gst_structure_to_string() returns an allocated string
    // that appears to be leaked here -- confirm LOG_DEBUG semantics
    LOG_DEBUG("Caps structure from v4l2src srcpad: " << gst_structure_to_string(structure));
    gint framerate_numerator, framerate_denominator;
    if (GST_VALUE_HOLDS_LIST(val))
    {
        // trying another one
        if (framerateIndex >= gst_value_list_get_size(val))
            THROW_ERROR("Framerate index out of range");
        framerate_numerator = gst_value_get_fraction_numerator((gst_value_list_get_value(val, framerateIndex)));
        framerate_denominator = gst_value_get_fraction_denominator((gst_value_list_get_value(val, framerateIndex)));
    }
    else
    {
        // FIXME: this is really bad, we should be iterating over framerates and resolutions until we find a good one
        if (framerateIndex > 0)
            LOG_ERROR("Caps parameters haven't been changed and have failed before");
        framerate_numerator = gst_value_get_fraction_numerator(val);
        framerate_denominator = gst_value_get_fraction_denominator(val);
    }

    // done probing the device; drop the caps and pad references
    gst_caps_unref(caps);
    gst_object_unref(srcPad);

    // use default from gst
    std::string capsSuffix = boost::lexical_cast<std::string>(framerate_numerator);
    capsSuffix += "/";
    capsSuffix += boost::lexical_cast<std::string>(framerate_denominator);

    if (v4l2util::isInterlaced(deviceStr()))
        capsSuffix +=", interlaced=true";

    capsSuffix += ", pixel-aspect-ratio=";
    capsSuffix += config_.pixelAspectRatio();

    capsStr << "video/x-raw-yuv, width=" << config_.captureWidth() << ", height="
        << config_.captureHeight()
        << ", framerate="
        << capsSuffix;
    LOG_DEBUG("V4l2src caps are " << capsStr.str());
    // release the device again
    ret = gst_element_set_state(source_, GST_STATE_NULL);
    if (ret not_eq GST_STATE_CHANGE_SUCCESS)
        THROW_ERROR("Could not change v4l2src state to NULL");

    return capsStr.str();
}
コード例 #27
0
/* "new pad" callback: fetch the pad's caps and let try_to_plug() decide
 * how to connect it. */
static void
cb_newpad (GstElement *element,
	   GstPad     *pad,
	   gpointer    data)
{
  GstCaps *pad_caps = gst_pad_get_caps (pad);

  try_to_plug (pad, pad_caps);

  /* try_to_plug() does not take ownership of our caps reference */
  gst_caps_unref (pad_caps);
}
コード例 #28
0
/* Builds the human-readable details string for @pad (name plus a dump of
 * its caps) and stores it in priv->details. */
static void
mx_gst_graph_element_pad_create_info_txt (MxGstGraphElementPad *pad)
{
  MxGstGraphElementPadPrivate *priv = pad->priv;

  GString *txt = g_string_new("");
  /* NOTE(review): gst_pad_get_name() returns an allocated string that
   * looks leaked here -- confirm */
  g_string_append_printf(txt, "Name:  %s\n", 
      gst_pad_get_name(priv->pad));
  /* NOTE(review): gst_pad_get_caps() returns a reference; whether
   * _print_caps() consumes it is not visible here -- verify, otherwise
   * this leaks the caps */
  GString *caps_str = _print_caps(gst_pad_get_caps(priv->pad), TRUE);
  txt = g_string_append(txt, caps_str->str);  
  g_string_free(caps_str, TRUE);
  /* g_string_free(..., FALSE) hands the character data to priv->details */
  priv->details = g_string_free(txt, FALSE);
}
コード例 #29
0
ファイル: tag.c プロジェクト: supercatexpert/RhythmCat
/* decodebin "new-decoded-pad" callback used while probing a file's tags:
 * classifies the pad as audio/video/other via its caps, links it to the
 * probe fakesink, and stops the pipeline when the caps are empty/ANY
 * (i.e. the file is not decodable media). */
static void rc_tag_gst_new_decoded_pad_cb(GstElement *decodebin, 
    GstPad *pad, gboolean last, RCTagDecodedPadData *data)
{
    GstCaps *caps;
    GstStructure *structure;
    const gchar *mimetype;
    gboolean cancel = FALSE;
    GstPad *sink_pad;
    caps = gst_pad_get_caps(pad);
    /* we get "ANY" caps for text/plain files etc. */
    if(gst_caps_is_empty(caps) || gst_caps_is_any(caps))
    {
        rc_debug_module_print(module_name, "Decoded pad with no caps or "
            "any caps. This file is boring.");
        cancel = TRUE;
        data->non_audio_flag = TRUE;
    }
    else
    {
        /* drain the pad into the fakesink so decoding can proceed */
        sink_pad = gst_element_get_static_pad(data->fakesink, "sink");
        gst_pad_link(pad, sink_pad);
        gst_object_unref(sink_pad);
        /* Is this pad audio? */
        structure = gst_caps_get_structure(caps, 0);
        mimetype = gst_structure_get_name(structure);
        if(g_str_has_prefix(mimetype, "audio/x-raw"))
        {
            rc_debug_module_print(module_name,
                "Got decoded audio pad of type %s", mimetype);
            data->audio_flag = TRUE;
        }
        else if(g_str_has_prefix(mimetype, "video/"))
        {
            rc_debug_module_print(module_name,
                "Got decoded video pad of type %s", mimetype);
            data->video_flag = TRUE;
        }
        else
        {
            rc_debug_module_print(module_name,
                "Got decoded pad of non-audio type %s", mimetype);
            data->non_audio_flag = TRUE;
        }
    }
    gst_caps_unref(caps);
    /* If this is non-audio, cancel the operation.
     * This seems to cause some deadlocks with video files, so only do it
     * when we get no/any caps.
     */
    if(cancel) gst_element_set_state(data->pipeline, GST_STATE_NULL);
}
コード例 #30
0
ファイル: cmplx.c プロジェクト: DanAnkers/libgstiq
/* Chain function: stamp the incoming buffer with the src pad's caps and
 * push it downstream unchanged. */
static GstFlowReturn gst_iqcmplx_chain(GstPad *pad, GstBuffer *buf)
{
	Gst_iqcmplx *self;
	GstCaps *out_caps;

	/* gst_pad_get_parent() refs the element; released before returning */
	self = GST_IQCMPLX(gst_pad_get_parent(pad));

	out_caps = gst_pad_get_caps(self->srcpad);
	gst_buffer_set_caps(buf, out_caps);
	gst_caps_unref(out_caps);

	gst_pad_push(self->srcpad, buf);
	gst_object_unref(self);

	return GST_FLOW_OK;
}