Example #1
/**
 * \brief Build the sink of the SU's pipeline; if this is a redirection, modify the caps to pass them to the SP pipeline
 * \param pipeline the pipeline of the stream
 * \param input the last element of the pipeline (avenc_mp4 or capsfilter), as we built our own source
 * \param caps the caps built from the SDP, to be replaced if the video is MPEG-4 or JPEG2000
 * \return GstElement* the last element added to the pipeline (RAW: return input, MPEG: mpeg4videoparse, J2K: capsfilter)
 */
static GstElement *handle_redirection_SU_pipeline ( GstElement *pipeline, GstCaps *caps, GstElement *input){

	GstStructure *video_caps = gst_caps_get_structure( caps , 0 );

	g_debug("handle_redirection_SU_pipeline: checks and handles Service User's pipeline for redirection");

	/* in case MPEG4 video type has been detected */
	if  (gst_structure_has_name( video_caps, "video/mpeg")){

		/* Add the MPEG-4 parser in SU pipeline */
		GstElement *parser 	= gst_element_factory_make_log ("mpeg4videoparse", MPEG4PARSER_NAME);
		if ( !parser )
			return NULL;

		g_debug("add %s to pipeline", MPEG4PARSER_NAME);

		gst_bin_add(GST_BIN(pipeline),parser);

		if ( !gst_element_link_log(input, parser))
			return NULL;

		input = parser;

	}
	/* in case J2K video type has been detected */
	else if  ( g_strv_contains ( J2K_STR_NAMES, gst_structure_get_name(video_caps))){

		GstElement *capsfilter = gst_element_factory_make_log("capsfilter", CAPSFITER_J2K_NAME );
		if ( !capsfilter )
			return NULL;

		GstCaps *caps_jpeg2000 = get_rtpj2kpay_allowed_caps();

		/* Restrict the capsfilter to the caps accepted by rtpj2kpay */
		g_object_set (capsfilter, "caps", caps_jpeg2000 , NULL);

		g_debug("add %s to pipeline", CAPSFITER_J2K_NAME );

		gst_bin_add(GST_BIN(pipeline),capsfilter);

		if ( !gst_element_link_log(input,capsfilter )){
			g_critical("JPEG2000 format can only be %s", gst_caps_to_string( caps_jpeg2000 ) );
			return NULL;
		}

		input = capsfilter;
	}

	/*
	 * Run a new typefind to update the caps, so that the final typefind run in the redirection's SP pipeline
	 * can succeed. Update the caps accordingly.
	 */
	video_caps = type_detection(GST_BIN(pipeline), input, NULL);
	if( !video_caps )
		return NULL;

	gst_caps_append_structure( caps , gst_structure_copy ( video_caps ) );
	gst_caps_remove_structure ( caps, 0 ) ;

	return input;

}
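The last lines above show the idiom this page is about: the caller's caps are updated in place by appending a copy of the freshly detected structure and then removing the stale structure at index 0, so the same GstCaps pointer keeps describing the current format. A minimal self-contained sketch of that idiom (the caps and structure contents below are made up for illustration):

#include <gst/gst.h>

/* Replace the single (stale) structure of writable caps with a copy of a
 * freshly detected one, as handle_redirection_SU_pipeline() does above. */
static void
replace_first_structure (GstCaps *caps, const GstStructure *detected)
{
	gst_caps_append_structure (caps, gst_structure_copy (detected));
	gst_caps_remove_structure (caps, 0);
}

int
main (int argc, char *argv[])
{
	gst_init (&argc, &argv);

	GstCaps *caps = gst_caps_from_string ("video/x-raw, width=640, height=480");
	GstStructure *detected =
			gst_structure_new ("video/mpeg", "mpegversion", G_TYPE_INT, 4, NULL);

	replace_first_structure (caps, detected);

	gchar *str = gst_caps_to_string (caps);
	g_print ("updated caps: %s\n", str);	/* only the detected structure remains */
	g_free (str);

	gst_structure_free (detected);
	gst_caps_unref (caps);
	return 0;
}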
Example #2
static GstCaps *
gst_video_rate_divider_transform_caps (GstBaseTransform * trans,
                               GstPadDirection direction, GstCaps * caps)
{
  GstVideoRateDivider *videorate = GST_VIDEO_RATE_DIVIDER (trans);
  GstCaps *ret;
  GstStructure *s, *s2;
  gint rate_numerator, rate_denominator;

  ret = gst_caps_copy (caps);

  /* Any caps simply return */
  if (gst_caps_is_any (caps))
    {
      GST_DEBUG_OBJECT (trans,
        "transform caps: %" GST_PTR_FORMAT " (direction = %s) ANY",
                        caps, get_direction_name(direction));
      return ret;
    }

  s = gst_caps_get_structure (ret, 0);
  gst_structure_get_fraction (s, "framerate",
                              &rate_numerator, &rate_denominator);
  GST_DEBUG_OBJECT (trans,
      "transform caps: %" GST_PTR_FORMAT " (direction = %s framerate = %d/%d)",
                    caps, get_direction_name(direction), rate_numerator, rate_denominator);

  s2 = gst_structure_copy (s);

  if (direction == GST_PAD_SINK)
    {
      /* correct the input flow framerate */
      /* store the input framerate */
      videorate->from_rate_numerator = rate_numerator;
      videorate->from_rate_denominator = rate_denominator;

      gst_caps_remove_structure (ret, 0);
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION,
          rate_numerator, rate_denominator * videorate->factor, NULL);
      gst_caps_merge_structure (ret, s2);
    }

  return ret;
}
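GstVideoRateDivider is this example project's own element; the sink-direction branch above simply rescales the advertised framerate by multiplying the fraction's denominator by the configured factor before merging the rewritten structure back into the caps. A standalone sketch of just that caps arithmetic, assuming GStreamer 1.0 semantics for gst_caps_merge_structure() (it takes ownership of the caps and the structure and returns the merged caps) and a factor of 3:

#include <gst/gst.h>

/* Divide the framerate advertised in simple caps by an integer factor,
 * mirroring the GST_PAD_SINK branch of the transform_caps above. */
static GstCaps *
divide_framerate (const GstCaps *caps, gint factor)
{
  GstCaps *ret = gst_caps_copy (caps);
  GstStructure *s = gst_caps_get_structure (ret, 0);
  GstStructure *s2 = gst_structure_copy (s);
  gint num = 0, denom = 1;

  gst_structure_get_fraction (s, "framerate", &num, &denom);

  gst_caps_remove_structure (ret, 0);
  gst_structure_set (s2, "framerate", GST_TYPE_FRACTION,
      num, denom * factor, NULL);
  ret = gst_caps_merge_structure (ret, s2);

  return ret;
}

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstCaps *in = gst_caps_from_string ("video/x-raw, framerate=30/1");
  GstCaps *out = divide_framerate (in, 3);  /* 30/1 divided by 3 -> 10/1 */

  gchar *str = gst_caps_to_string (out);
  g_print ("%s\n", str);
  g_free (str);

  gst_caps_unref (in);
  gst_caps_unref (out);
  return 0;
}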
Example #3
static void
list_codecs (void)
{
    GstCaps *l, *caps;
    GstStructure *st;
    guint i, len;
    gchar *tmpstr, *desc;

    caps = gst_caps_new_empty ();

    g_print ("Available container formats:\n");
    l = gst_caps_list_container_formats (GST_RANK_NONE);
    len = gst_caps_get_size (l);
    for (i = 0; i < len; i++) {
        st = gst_caps_steal_structure (l, 0);
        gst_caps_append_structure (caps, st);

        tmpstr = gst_caps_to_string (caps);
        desc = gst_pb_utils_get_codec_description (caps);
        g_print ("  %s - %s\n", desc, tmpstr);
        g_free (tmpstr);
        if (desc)
            g_free (desc);
        gst_caps_remove_structure (caps, 0);
    }
    g_print ("\n");
    gst_caps_unref (l);

    g_print ("Available video codecs:\n");
    l = gst_caps_list_video_encoding_formats (GST_RANK_NONE);
    len = gst_caps_get_size (l);
    for (i = 0; i < len; i++) {
        st = gst_caps_steal_structure (l, 0);
        gst_caps_append_structure (caps, st);

        tmpstr = gst_caps_to_string (caps);
        desc = gst_pb_utils_get_codec_description (caps);
        g_print ("  %s - %s\n", desc, tmpstr);
        g_free (tmpstr);
        if (desc)
            g_free (desc);
        gst_caps_remove_structure (caps, 0);
    }
    g_print ("\n");
    gst_caps_unref (l);

    g_print ("Available audio codecs:\n");
    l = gst_caps_list_audio_encoding_formats (GST_RANK_NONE);
    len = gst_caps_get_size (l);
    for (i = 0; i < len; i++) {
        st = gst_caps_steal_structure (l, 0);
        gst_caps_append_structure (caps, st);

        tmpstr = gst_caps_to_string (caps);
        desc = gst_pb_utils_get_codec_description (caps);
        g_print ("  %s - %s\n", desc, tmpstr);
        g_free (tmpstr);
        if (desc)
            g_free (desc);
        gst_caps_remove_structure (caps, 0);
    }
    g_print ("\n");
    gst_caps_unref (l);

    gst_caps_unref (caps);
}
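The three loops in list_codecs() are identical apart from the heading and the list being printed; a possible refactor keeps the steal/append/describe/remove pattern in a single helper. gst_caps_list_container_formats() and its siblings are this project's own helpers, so the sketch below uses only standard caps and pbutils calls and feeds the helper a hand-built list; like the loops above, it consumes the list it is given.

#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>

/* Print a heading, then describe each structure of a writable caps "list" by
 * moving it into a temporary single-structure caps. Consumes the list. */
static void
print_caps_list (const gchar *heading, GstCaps *list)
{
    GstCaps *caps = gst_caps_new_empty ();
    GstStructure *st;
    guint i, len;
    gchar *tmpstr, *desc;

    g_print ("%s\n", heading);
    len = gst_caps_get_size (list);
    for (i = 0; i < len; i++) {
        st = gst_caps_steal_structure (list, 0);
        gst_caps_append_structure (caps, st);

        tmpstr = gst_caps_to_string (caps);
        desc = gst_pb_utils_get_codec_description (caps);
        g_print ("  %s - %s\n", desc, tmpstr);
        g_free (tmpstr);
        g_free (desc);          /* g_free() already tolerates NULL */

        gst_caps_remove_structure (caps, 0);
    }
    g_print ("\n");

    gst_caps_unref (list);
    gst_caps_unref (caps);
}

int
main (int argc, char *argv[])
{
    gst_init (&argc, &argv);
    gst_pb_utils_init ();

    /* A hand-built stand-in for the lists returned by the project's helpers. */
    print_caps_list ("Available container formats:",
        gst_caps_from_string ("video/quicktime; application/ogg; video/x-matroska"));
    return 0;
}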
Example #4
/**
 * \brief Build the sink of the SU's pipeline; if this is a redirection, modify the caps to pass them to the SP pipeline
 * \param pipeline the pipeline of the stream
 * \param bus the bus associated with the pipeline
 * \param bus_watch_id the watch associated with the bus
 * \param input the last element of the pipeline (avenc_mp4 or capsfilter), as we built our own source
 * \param channel_entry the channel of the SU in the device's channel table
 * \param cmdline the gst_sink command line from the configuration vivoe-mib.conf
 * \param redirect the redirection data (mapping SU's channel Index --> SP's videoFormatIndex)
 * \param caps the caps built from the SDP, which may be replaced if this is a redirection
 * \param video_stream_info the videoFormat entry associated with this stream
 * \return GstElement* the last element added to the pipeline (appsink or a displayer like x(v)imagesink)
 */
static GstElement* addSink_SU( 	GstElement 					*pipeline, 		GstBus 		*bus,
								guint 						bus_watch_id, 	GstElement 	*input,
								struct channelTable_entry 	*channel_entry, gchar 		*cmdline,
								redirect_data 				*redirect, 		GstCaps 	*caps,
								struct videoFormatTable_entry *video_stream_info
								){

	GError 		*error 				= NULL; /* an object to store errors when they occur */
	GstElement 	*sink 				= NULL; /* to return last element of pipeline */

	g_debug("addSink_SU: add sink element to Service User's pipeline");

	/*
	 * Classic case
	 */
	if ( !redirect ){

		sink  = gst_parse_bin_from_description (cmdline,
												TRUE,
												&error);
		if ( !sink )
			return NULL;

		g_object_set(sink, "name", "gst_sink", NULL);

	}
	else /* redirection case */
	{

		/* add app sink */
		sink = gst_element_factory_make_log( "appsink" , APPSINK_NAME );

		if ( !sink)
			return NULL;

		/* connect a callback on each sample received */
		g_object_set (G_OBJECT (sink), "emit-signals", TRUE, "sync", TRUE, NULL);
		g_signal_connect (sink, "new-sample",
			G_CALLBACK (on_new_sample_from_sink), redirect->pipeline_SP);

		input = parse_conf_for_redirection( pipeline, input, redirect);

		if ( !input )
			return NULL;

	}

	/* detect the caps of the video at the output of the GStreamer pipeline given by the user in the gst_sink command line of vivoe-mib.conf */
	GstStructure *video_caps;
	video_caps = type_detection(GST_BIN(pipeline), input, NULL);
	if ( !video_caps )
		return NULL;

	gst_caps_append_structure( caps , gst_structure_copy ( video_caps ) );
	gst_caps_remove_structure ( caps, 0 ) ;
	GstElement *last = handle_redirection_SU_pipeline(pipeline, caps, input);
	/* update input element to link to appsink */
	input = last ;

	/* Check that the sink was successfully created */
    if(sink == NULL){
       g_critical ( "error cannot create element for: %s","sink");
	   return NULL;
    }

	g_debug("add %s to SU pipeline", GST_ELEMENT_NAME(sink));

	/* add sink to pipeline */
	if ( !gst_bin_add(GST_BIN (pipeline), sink )){
		g_critical("Unable to add %s to pipeline", gst_element_get_name(sink));
		return NULL;
	}

	/*
	 * Then, after the sink has been added, handle the ROI.
	 * To do so, we need to copy the values of the channelRoiOrigin and channelRoiExtent parameters into the videoFormat
	 */
	if (handle_roi ( pipeline , video_stream_info, channel_entry ) && redirect ){

			redirect->roi_presence = TRUE;

			GstElement *typefind_roi = type_detection_element_for_roi(GST_BIN( pipeline) );

			if ( !typefind_roi ){
				g_critical("Failed to create pipeline");
				return NULL;
			}

			/*
			 * If the videoFormat is a ROI, create a branch from the RTP element: branch 1 is the UDP element, branch 2 is the typefind_roi element;
			 * rtp and udpsink will be linked there.
			 * Otherwise, just link the udp source to the payloader.
			 */

			if ( video_stream_info->videoFormatType == roi ){
				if ( !create_branch_in_pipeline( pipeline , input , sink , typefind_roi ) ){
					g_critical("Failed to create pipeline");
					return NULL;
				}

			}

	}else{

		/* we link the elements together */
		if ( !gst_element_link_log (input, sink))
			return NULL;
	}

	return sink;
}
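In the redirection branch above, the appsink's "new-sample" signal is connected to on_new_sample_from_sink() with the SP pipeline as user data, but that callback is not shown on this page and the project's actual implementation may differ. A minimal sketch of what such a callback typically looks like with the GStreamer 1.0 app library, assuming (hypothetically) that the SP pipeline contains an appsrc named "redirect_src":

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>

/* Pull each sample arriving at the SU appsink and push its buffer into the
 * appsrc feeding the Service Provider pipeline. */
static GstFlowReturn
on_new_sample_from_sink (GstElement *appsink, gpointer user_data)
{
	GstElement *pipeline_SP = GST_ELEMENT (user_data);
	GstFlowReturn ret = GST_FLOW_OK;

	GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (appsink));
	if ( !sample )
		return GST_FLOW_ERROR;

	/* "redirect_src" is a hypothetical element name used for this sketch */
	GstElement *appsrc = gst_bin_get_by_name (GST_BIN (pipeline_SP), "redirect_src");
	if ( appsrc ){
		GstBuffer *buffer = gst_sample_get_buffer (sample);
		/* push_buffer takes ownership of the buffer, so pass an extra reference */
		ret = gst_app_src_push_buffer (GST_APP_SRC (appsrc), gst_buffer_ref (buffer));
		gst_object_unref (appsrc);
	}

	gst_sample_unref (sample);
	return ret;
}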
Example #5
static GstFlowReturn
gst_pnmdec_chain (GstPad * pad, GstBuffer * data)
{
  GstPnmdec *s = GST_PNMDEC (gst_pad_get_parent (pad));
  GstPad *src = gst_element_get_static_pad (GST_ELEMENT (s), "src");
  GstBuffer *buf;
  GstCaps *caps = NULL;
  GstFlowReturn r = GST_FLOW_OK;
  guint8 offset = 0;

  if (!(s->mngr.info.fields & GST_PNM_INFO_FIELDS_ALL)) {
    switch (gst_pnm_info_mngr_scan (&s->mngr, GST_BUFFER_DATA (data),
            GST_BUFFER_SIZE (data))) {
      case GST_PNM_INFO_MNGR_RESULT_FAILED:
        gst_buffer_unref (data);
        r = GST_FLOW_ERROR;
        goto out;
      case GST_PNM_INFO_MNGR_RESULT_READING:
        gst_buffer_unref (data);
        r = GST_FLOW_OK;
        goto out;
      case GST_PNM_INFO_MNGR_RESULT_FINISHED:
        offset = s->mngr.data_offset;
        caps = gst_caps_copy (gst_pad_get_pad_template_caps (src));
        switch (s->mngr.info.type) {
          case GST_PNM_TYPE_BITMAP_RAW:
          case GST_PNM_TYPE_BITMAP_ASCII:
          case GST_PNM_TYPE_GRAYMAP_RAW:
          case GST_PNM_TYPE_GRAYMAP_ASCII:
            gst_caps_remove_structure (caps, 0);
            s->size = s->mngr.info.width * s->mngr.info.height * 1;
            break;
          case GST_PNM_TYPE_PIXMAP_RAW:
          case GST_PNM_TYPE_PIXMAP_ASCII:
            gst_caps_remove_structure (caps, 1);
            s->size = s->mngr.info.width * s->mngr.info.height * 3;
            break;
        }
        gst_caps_set_simple (caps,
            "width", G_TYPE_INT, s->mngr.info.width,
            "height", G_TYPE_INT, s->mngr.info.height, "framerate",
            GST_TYPE_FRACTION, 0, 1, NULL);
        if (!gst_pad_set_caps (src, caps)) {
          gst_caps_unref (caps);
          r = GST_FLOW_ERROR;
          goto out;
        }
        gst_caps_unref (caps);
    }
  }

  if (offset == GST_BUFFER_SIZE (data)) {
    r = GST_FLOW_OK;
    goto out;
  }

  /* If we got the whole image, just push the buffer. */
  if (GST_BUFFER_SIZE (data) - offset == s->size) {
    buf = gst_buffer_create_sub (data, offset, s->size);
    gst_buffer_unref (data);
    memset (&s->mngr, 0, sizeof (GstPnmInfoMngr));
    s->size = 0;
    gst_buffer_set_caps (buf, GST_PAD_CAPS (src));
    r = gst_pnmdec_push (s, src, buf);
    goto out;
  }

  /* We didn't get the whole image. */
  if (!s->buf) {
    s->buf = gst_buffer_create_sub (data, offset,
        GST_BUFFER_SIZE (data) - offset);
  } else {
    buf = gst_buffer_span (s->buf, 0, data,
        GST_BUFFER_SIZE (s->buf) + GST_BUFFER_SIZE (data) - offset);
    gst_buffer_unref (s->buf);
    s->buf = buf;
  }
  if (!s->buf) {
    r = GST_FLOW_ERROR;
    goto out;
  }

  /* Do we now have the full image? If yes, push. */
  if (GST_BUFFER_SIZE (s->buf) == s->size) {
    gst_buffer_set_caps (s->buf, GST_PAD_CAPS (src));
    r = gst_pnmdec_push (s, src, s->buf);
    s->buf = NULL;
    memset (&s->mngr, 0, sizeof (GstPnmInfoMngr));
    s->size = 0;
  }

out:
  gst_object_unref (src);
  gst_object_unref (s);

  return r;
}
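This decoder is written against the GStreamer 0.10 API; its src pad template carries one structure per supported raw format, and the chain function keeps only the structure that matches the parsed PNM header before fixating width, height and framerate. The same narrowing idea in a standalone 1.0-era sketch, with a made-up two-structure caps string standing in for the pad template:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  /* Stand-in for a src pad template with one RGB and one grayscale structure. */
  GstCaps *caps = gst_caps_from_string (
      "video/x-raw, format=RGB; video/x-raw, format=GRAY8");

  gboolean is_pixmap = TRUE;    /* e.g. a P6 (pixmap) header was parsed */

  /* Drop the structure that does not match the parsed header ... */
  gst_caps_remove_structure (caps, is_pixmap ? 1 : 0);

  /* ... then pin down the geometry, as the decoder does. */
  gst_caps_set_simple (caps,
      "width", G_TYPE_INT, 320,
      "height", G_TYPE_INT, 240,
      "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

  gchar *str = gst_caps_to_string (caps);
  g_print ("%s\n", str);
  g_free (str);

  gst_caps_unref (caps);
  return 0;
}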
Example #6
void Caps::removeStructure(uint index)
{
    gst_caps_remove_structure(object<GstCaps>(), index);
}
Example #7
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstCaps *ret;
  GstStructure *s, *s2;
  GstStructure *s3 = NULL;
  int maxrate = g_atomic_int_get (&videorate->max_rate);

  /* Should always be called with simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  ret = gst_caps_copy (caps);

  s = gst_caps_get_structure (ret, 0);
  s2 = gst_structure_copy (s);

  if (videorate->drop_only) {
    gint min_num = 0, min_denom = 1;
    gint max_num = G_MAXINT, max_denom = 1;

    /* Clamp the caps to our maximum rate as the first caps if possible */
    if (!gst_video_max_rate_clamp_structure (s, maxrate,
            &min_num, &min_denom, &max_num, &max_denom)) {
      min_num = 0;
      min_denom = 1;
      max_num = maxrate;
      max_denom = 1;

      /* clamp wouldn't be a real subset of 1..maxrate, in this case the sink
       * caps should become [1..maxrate], [1..maxint] and the src caps just
       * [1..maxrate].  In case there was a caps incompatibility things will
       * explode later as appropriate :)
       *
       * In case [X..maxrate] == [X..maxint], skip as we'll set it later
       */
      if (direction == GST_PAD_SRC && maxrate != G_MAXINT)
        gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE,
            min_num, min_denom, maxrate, 1, NULL);
      else
        gst_caps_remove_structure (ret, 0);
    }

    if (direction == GST_PAD_SRC) {
      /* We can accept anything as long as it's at least the minimal framerate
       * the sink needs */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
          min_num, min_denom, G_MAXINT, 1, NULL);

      /* Also allow unknown framerate, if it isn't already */
      if (min_num != 0 || min_denom != 1) {
        s3 = gst_structure_copy (s);
        gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
      }
    } else if (max_num != 0 || max_denom != 1) {
      /* We can provide everything up to the maximum framerate at the src */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
          0, 1, max_num, max_denom, NULL);
    }
  } else if (direction == GST_PAD_SINK) {
    gint min_num = 0, min_denom = 1;
    gint max_num = G_MAXINT, max_denom = 1;

    if (!gst_video_max_rate_clamp_structure (s, maxrate,
            &min_num, &min_denom, &max_num, &max_denom))
      gst_caps_remove_structure (ret, 0);

    gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
        maxrate, 1, NULL);
  } else {
    /* set the framerate as a range */
    gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
        G_MAXINT, 1, NULL);
  }

  gst_caps_merge_structure (ret, s2);
  if (s3 != NULL)
    gst_caps_merge_structure (ret, s3);

  return ret;
}
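Most of the caps handling above is gst_structure_set() on a copied structure plus gst_caps_remove_structure() / gst_caps_merge_structure() on the copied caps. A small standalone illustration of the GST_PAD_SINK branch, assuming GStreamer 1.0 semantics and a max_rate of 30: a fixed 60/1 framerate cannot be clamped into [0/1, 30/1], so the original structure is dropped and the ranged copy is merged in.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstCaps *ret = gst_caps_from_string (
      "video/x-raw, width=1280, height=720, framerate=60/1");
  GstStructure *s2 = gst_structure_copy (gst_caps_get_structure (ret, 0));

  /* 60/1 is not a subset of [0/1, 30/1], so drop it and offer the range instead. */
  gst_caps_remove_structure (ret, 0);
  gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30, 1, NULL);
  ret = gst_caps_merge_structure (ret, s2);   /* takes ownership of ret and s2 */

  gchar *str = gst_caps_to_string (ret);
  g_print ("%s\n", str);   /* ..., framerate=(fraction)[ 0/1, 30/1 ] */
  g_free (str);

  gst_caps_unref (ret);
  return 0;
}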