Example #1
static void
gst_base_audio_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstStructure *s;
  gint width, depth;

  s = gst_caps_get_structure (caps, 0);

  /* fields for all formats */
  gst_structure_fixate_field_nearest_int (s, "rate", 44100);
  gst_structure_fixate_field_nearest_int (s, "channels", 2);
  gst_structure_fixate_field_nearest_int (s, "width", 16);

  /* fields for int */
  if (gst_structure_has_field (s, "depth")) {
    gst_structure_get_int (s, "width", &width);
    /* round width up to the nearest multiple of 8 for the depth */
    depth = GST_ROUND_UP_8 (width);
    gst_structure_fixate_field_nearest_int (s, "depth", depth);
  }
  if (gst_structure_has_field (s, "signed"))
    gst_structure_fixate_field_boolean (s, "signed", TRUE);
  if (gst_structure_has_field (s, "endianness"))
    gst_structure_fixate_field_nearest_int (s, "endianness", G_BYTE_ORDER);
}
Example #2
/* this function is a bit of a last resort */
static GstCaps *
gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps)
{
  GstStructure *structure;
  gint i;

  GST_DEBUG_OBJECT (basesrc, "fixating caps %" GST_PTR_FORMAT, caps);

  caps = gst_caps_make_writable (caps);

  for (i = 0; i < gst_caps_get_size (caps); ++i) {
    structure = gst_caps_get_structure (caps, i);

    /* We are fixating to a reasonable 320x200 resolution
       and the maximum framerate for that size */
    if (gst_structure_has_field (structure, "width"))
      gst_structure_fixate_field_nearest_int (structure, "width", 320);

    if (gst_structure_has_field (structure, "height"))
      gst_structure_fixate_field_nearest_int (structure, "height", 200);

    if (gst_structure_has_field (structure, "framerate"))
      gst_structure_fixate_field_nearest_fraction (structure, "framerate",
          G_MAXINT, 1);

    if (gst_structure_has_field (structure, "format"))
      gst_structure_fixate_field (structure, "format");
  }

  GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, caps);

  caps = GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);

  return caps;
}
Example #3
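/* Fixates the first caps structure to 320x240 at 30 fps, preferring square pixels,
 * the SDTV color matrix, MPEG-2 chroma siting and non-interlaced video when those
 * fields are present. */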
static GstCaps *
gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
  GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
  GstStructure *structure;

  GST_DEBUG_OBJECT (intervideosrc, "fixate");

  caps = gst_caps_make_writable (caps);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_fixate_field_nearest_int (structure, "width", 320);
  gst_structure_fixate_field_nearest_int (structure, "height", 240);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", 30, 1);
  if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
    gst_structure_fixate_field_nearest_fraction (structure,
        "pixel-aspect-ratio", 1, 1);
  if (gst_structure_has_field (structure, "color-matrix"))
    gst_structure_fixate_field_string (structure, "color-matrix", "sdtv");
  if (gst_structure_has_field (structure, "chroma-site"))
    gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2");

  if (gst_structure_has_field (structure, "interlaced"))
    gst_structure_fixate_field_boolean (structure, "interlaced", FALSE);

  return caps;
}
Example #4
static GstCaps *
gst_video_test_src_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstStructure *structure;

  caps = gst_caps_make_writable (caps);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_fixate_field_nearest_int (structure, "width", 320);
  gst_structure_fixate_field_nearest_int (structure, "height", 240);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", 30, 1);
  if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
    gst_structure_fixate_field_nearest_fraction (structure,
        "pixel-aspect-ratio", 1, 1);
  if (gst_structure_has_field (structure, "colorimetry"))
    gst_structure_fixate_field_string (structure, "colorimetry", "bt601");
  if (gst_structure_has_field (structure, "chroma-site"))
    gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2");

  if (gst_structure_has_field (structure, "interlace-mode"))
    gst_structure_fixate_field_string (structure, "interlace-mode",
        "progressive");

  caps = GST_BASE_SRC_CLASS (parent_class)->fixate (bsrc, caps);

  return caps;
}
Example #5
bool tcam_gst_fixate_caps (GstCaps* caps)
{
    if (caps == nullptr
        || gst_caps_is_empty(caps)
        || gst_caps_is_any(caps))
    {
        return FALSE;
    }

    GstStructure* structure = gst_caps_get_structure(caps, 0);

    if (gst_structure_has_field(structure, "width"))
    {
        gst_structure_fixate_field_nearest_int(structure, "width", G_MAXINT);
    }
    if (gst_structure_has_field(structure, "height"))
    {
        gst_structure_fixate_field_nearest_int(structure, "height", G_MAXINT);
    }
    if (gst_structure_has_field(structure, "framerate"))
    {
        gst_structure_fixate_field_nearest_fraction(structure, "framerate", G_MAXINT, 1);
    }

    return TRUE;
}
Example #6
static gboolean
_cleanup_fields (const Properties * field_names, GstStructure * structure,
    GError ** error)
{
  guint i;

  for (i = 0; field_names[i].long_name; i++) {
    gboolean exists = FALSE;

    /* Move fields specified with the short name over to their long-name variant */
    if (gst_structure_has_field (structure, field_names[i].short_name)) {
      exists = TRUE;

      if (gst_structure_has_field (structure, field_names[i].long_name)) {
        *error = g_error_new (GES_ERROR, 0, "Using short and long name"
            " at the same time for property: %s, which one should I use?!",
            field_names[i].long_name);

        return FALSE;
      } else {
        const GValue *val =
            gst_structure_get_value (structure, field_names[i].short_name);

        gst_structure_set_value (structure, field_names[i].long_name, val);
        gst_structure_remove_field (structure, field_names[i].short_name);
      }
    } else if (gst_structure_has_field (structure, field_names[i].long_name)) {
      exists = TRUE;
    }

    if (exists) {
      if (field_names[i].type == GST_TYPE_CLOCK_TIME) {
        if (_convert_to_clocktime (structure, field_names[i].long_name, 0) == 0) {
          *error = g_error_new (GES_ERROR, 0, "Could not convert"
              " %s to GstClockTime", field_names[i].long_name);

          return FALSE;
        }
      }
    }

    if (field_names[i].new_name
        && gst_structure_has_field (structure, field_names[i].long_name)) {
      const GValue *val =
          gst_structure_get_value (structure, field_names[i].long_name);

      gst_structure_set_value (structure, field_names[i].new_name, val);
      gst_structure_remove_field (structure, field_names[i].long_name);
    }
  }

  return TRUE;
}
Example #7
static GstCaps *
fs_videoanyrate_transform_caps (GstBaseTransform *trans,
                                GstPadDirection direction,
                                GstCaps *caps,
                                GstCaps *filter)
{
    GstCaps *mycaps = gst_caps_copy (caps);
    guint i;

    if (gst_caps_get_size (mycaps) == 0)
        return mycaps;

    GST_DEBUG_OBJECT (trans, "Transforming caps");

    for (i = 0; i < gst_caps_get_size (mycaps); i++)
    {
        GstStructure *s;

        s = gst_caps_get_structure (mycaps, i);

        if (gst_structure_has_field (s, "framerate"))
            gst_structure_set (s,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }

    if (filter)
    {
        GstCaps *intersected = gst_caps_intersect (mycaps, filter);
        gst_caps_unref (mycaps);
        mycaps = intersected;
    }

    return mycaps;
}
Example #8
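/* Returns the element-stats sub-structure embedded in a stats structure, or NULL
 * if the field is missing, does not hold a structure, or has an unexpected name. */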
GstStructure *
kms_stats_get_element_stats (GstStructure * stats)
{
  GstStructure *element_stats;
  const GValue *value;

  if (!gst_structure_has_field (stats, KMS_MEDIA_ELEMENT_FIELD)) {
    return NULL;
  }

  value = gst_structure_get_value (stats, KMS_MEDIA_ELEMENT_FIELD);

  if (!GST_VALUE_HOLDS_STRUCTURE (value)) {
    return NULL;
  }

  element_stats = (GstStructure *) gst_value_get_structure (value);

  if (g_strcmp0 (KMS_ELEMENT_STATS_STRUCT_NAME,
          gst_structure_get_name (element_stats)) != 0) {
    return NULL;
  }

  return element_stats;
}
Example #9
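/* For custom upstream events that carry a structure without an "ssrc" field,
 * fill in the SSRC of the pad the event arrived on before forwarding it. */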
static gboolean
gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent,
                              GstEvent * event)
{
    GstRtpSsrcDemux *demux;
    const GstStructure *s;

    demux = GST_RTP_SSRC_DEMUX (parent);

    switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
    case GST_EVENT_CUSTOM_BOTH_OOB:
        s = gst_event_get_structure (event);
        if (s && !gst_structure_has_field (s, "ssrc")) {
            GstRtpSsrcDemuxPad *dpad = find_demux_pad_for_pad (demux, pad);

            if (dpad) {
                GstStructure *ws;

                event = gst_event_make_writable (event);
                ws = gst_event_writable_structure (event);
                gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpad->ssrc, NULL);
            }
        }
        break;
    default:
        break;
    }

    return gst_pad_event_default (pad, parent, event);
}
Example #10
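/* Prepares the mock HTTP source input: picks a Content-Type response header based
 * on the URI suffix and records the requested URI in the test case's "requests"
 * array. */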
static void
gst_hlsdemux_test_set_input_data (const GstHlsDemuxTestCase * test_case,
    const GstHlsDemuxTestInputData * input, GstTestHTTPSrcInput * output)
{
  output->size = input->size;
  output->context = (gpointer) input;
  if (output->size == 0) {
    output->size = strlen ((gchar *) input->payload);
  }
  fail_unless (input->uri != NULL);
  if (g_str_has_suffix (input->uri, ".m3u8")) {
    output->response_headers = gst_structure_new ("response-headers",
        "Content-Type", G_TYPE_STRING, "application/vnd.apple.mpegurl", NULL);
  } else if (g_str_has_suffix (input->uri, ".ts")) {
    output->response_headers = gst_structure_new ("response-headers",
        "Content-Type", G_TYPE_STRING, "video/mp2t", NULL);
  }
  if (gst_structure_has_field (test_case->state, "requests")) {
    GstHlsDemuxTestAppendUriContext context =
        { g_quark_from_string ("requests"), input->uri };
    gst_structure_map_in_place (test_case->state, append_request_uri, &context);
  } else {
    GValue requests = G_VALUE_INIT;
    GValue uri_val = G_VALUE_INIT;

    g_value_init (&requests, GST_TYPE_ARRAY);
    g_value_init (&uri_val, G_TYPE_STRING);
    g_value_set_string (&uri_val, input->uri);
    gst_value_array_append_value (&requests, &uri_val);
    gst_structure_set_value (test_case->state, "requests", &requests);
    g_value_unset (&uri_val);
    g_value_unset (&requests);
  }
}
Example #11
/* doesn't return a ref to the pixbuf */
static GdkPixbuf *
check_message_pixbuf (GstMessage * msg, const gchar * name, gint channels,
    gboolean has_alpha)
{
  GdkPixbuf *pixbuf;
  const GstStructure *s;

  fail_unless (gst_message_get_structure (msg) != NULL);

  s = gst_message_get_structure (msg);
  fail_unless_equals_string (gst_structure_get_name (s), name);

  fail_unless (gst_structure_has_field (s, "pixbuf"));
  fail_unless (gst_structure_has_field_typed (s, "pixel-aspect-ratio",
          GST_TYPE_FRACTION));
  pixbuf =
      GDK_PIXBUF (g_value_get_object (gst_structure_get_value (s, "pixbuf")));
  fail_unless (GDK_IS_PIXBUF (pixbuf));
  fail_unless_equals_int (gdk_pixbuf_get_n_channels (pixbuf), channels);
  fail_unless_equals_int (gdk_pixbuf_get_has_alpha (pixbuf), has_alpha);
  fail_unless_equals_int (gdk_pixbuf_get_width (pixbuf), 319);
  fail_unless_equals_int (gdk_pixbuf_get_height (pixbuf), 241);

  return pixbuf;
}
Example #12
static GstCaps *
gst_video_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstCaps *ret;
  GstStructure *structure;

  /* this function is always called with a simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  GST_DEBUG_OBJECT (trans,
      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
      (direction == GST_PAD_SINK) ? "sink" : "src");

  ret = gst_caps_copy (caps);
  structure = gst_structure_copy (gst_caps_get_structure (ret, 0));

  gst_structure_set (structure,
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

  /* if pixel aspect ratio, make a range of it */
  if (gst_structure_has_field (structure, "pixel-aspect-ratio")) {
    gst_structure_set (structure, "pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE,
        1, G_MAXINT, G_MAXINT, 1, NULL);
  }
  gst_caps_append_structure (ret, structure);

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
Example #13
static GstCaps *
gst_video_scale_transform_caps (GstBaseTransform * trans,
                                GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
    GstCaps *ret;
    GstStructure *structure;
    GstCapsFeatures *features;
    gint i, n;

    GST_DEBUG_OBJECT (trans,
                      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
                      (direction == GST_PAD_SINK) ? "sink" : "src");

    ret = gst_caps_new_empty ();
    n = gst_caps_get_size (caps);
    for (i = 0; i < n; i++) {
        structure = gst_caps_get_structure (caps, i);
        features = gst_caps_get_features (caps, i);

        /* If this is already expressed by the existing caps
         * skip this structure */
        if (i > 0 && gst_caps_is_subset_structure_full (ret, structure, features))
            continue;

        /* make copy */
        structure = gst_structure_copy (structure);

        /* If the features are non-sysmem we can only do passthrough */
        if (!gst_caps_features_is_any (features)
                && gst_caps_features_is_equal (features,
                                               GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)) {
            gst_structure_set (structure, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                               "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

            /* if pixel aspect ratio, make a range of it */
            if (gst_structure_has_field (structure, "pixel-aspect-ratio")) {
                gst_structure_set (structure, "pixel-aspect-ratio",
                                   GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
            }
        }
        gst_caps_append_structure_full (ret, structure,
                                        gst_caps_features_copy (features));
    }

    if (filter) {
        GstCaps *intersection;

        intersection =
            gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (ret);
        ret = intersection;
    }

    GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

    return ret;
}
Example #14
static gboolean
validate_h263_codecs (CodecCap *codec_cap)
{
  /* we assume we have just one structure per caps as it should be */
  GstStructure *media_struct = gst_caps_get_structure (codec_cap->caps, 0);
  const gchar *name = gst_structure_get_name (media_struct);
  GstStructure *rtp_struct;
  const gchar *encoding_name;

  if (!name)
    return FALSE;

  /* let's check if it's h263 */
  if (strcmp (name, "video/x-h263"))
    return TRUE;

  /* If we don't have a h263version field, accept everything */
  if (!gst_structure_has_field (media_struct, "h263version"))
    return TRUE;

  rtp_struct = gst_caps_get_structure (codec_cap->rtp_caps, 0);
  if (!rtp_struct)
    return FALSE;

  encoding_name = gst_structure_get_string (rtp_struct, "encoding-name");

  /* If there is no encoding name, we have a problem, let's refuse it */
  if (!encoding_name)
    return FALSE;

  if (struct_field_has_line (media_struct, "h263version", "h263"))
  {
    /* baseline H263 can only be encoding name H263 or H263-1998 */

    if (strcmp (encoding_name, "H263") &&
        strcmp (encoding_name, "H263-1998"))
      return FALSE;
  }
  else if (struct_field_has_line (media_struct, "h263version", "h263p"))
  {
    /* has to be H263-1998 */
    if (strcmp (encoding_name, "H263-1998"))
      return FALSE;
  }
  else if (struct_field_has_line (media_struct, "h263version", "h263pp"))
  {
    /* has to be H263-2000 */
    if (strcmp (encoding_name, "H263-2000"))
      return FALSE;
  }

  /* if no h263version specified, we assume it's all h263 versions */

  return TRUE;
}
Example #15
static GstCaps* gst_imx_blitter_video_transform_transform_caps(GstBaseTransform *transform, G_GNUC_UNUSED GstPadDirection direction, GstCaps *caps, GstCaps *filter)
{
	GstCaps *tmpcaps1, *tmpcaps2, *result;
	GstStructure *structure;
	gint i, n;

	tmpcaps1 = gst_caps_new_empty();
	n = gst_caps_get_size(caps);
	for (i = 0; i < n; i++)
	{
		structure = gst_caps_get_structure(caps, i);

		/* If this is already expressed by the existing caps
		 * skip this structure */
		if ((i > 0) && gst_caps_is_subset_structure(tmpcaps1, structure))
			continue;

		/* make copy */
		structure = gst_structure_copy(structure);
		gst_structure_set(
			structure,
			"width", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			"height", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			NULL
		);

		/* colorimetry is not supported by the videotransform element */
		gst_structure_remove_fields(structure, "format", "colorimetry", "chroma-site", NULL);

		/* if pixel aspect ratio, make a range of it */
		if (gst_structure_has_field(structure, "pixel-aspect-ratio"))
		{
			gst_structure_set(
				structure,
				"pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1,
				NULL
			);
		}
		gst_caps_append_structure(tmpcaps1, structure);
	}

	/* filter the resulting caps if necessary */
	if (filter != NULL)
	{
		tmpcaps2 = gst_caps_intersect_full(filter, tmpcaps1, GST_CAPS_INTERSECT_FIRST);
		gst_caps_unref(tmpcaps1);
		tmpcaps1 = tmpcaps2;
	}

	result = tmpcaps1;

	GST_DEBUG_OBJECT(transform, "transformed %" GST_PTR_FORMAT " into %" GST_PTR_FORMAT, (gpointer)caps, (gpointer)result);

	return result;
}
Example #16
static gboolean
gst_wavpack_dec_sink_set_caps (GstPad * pad, GstCaps * caps)
{
    GstWavpackDec *dec = GST_WAVPACK_DEC (gst_pad_get_parent (pad));
    GstStructure *structure = gst_caps_get_structure (caps, 0);

    /* Check if we can set the caps here already */
    if (gst_structure_get_int (structure, "channels", &dec->channels) &&
            gst_structure_get_int (structure, "rate", &dec->sample_rate) &&
            gst_structure_get_int (structure, "width", &dec->depth)) {
        GstCaps *caps;
        GstAudioChannelPosition *pos;

        caps = gst_caps_new_simple ("audio/x-raw-int",
                                    "rate", G_TYPE_INT, dec->sample_rate,
                                    "channels", G_TYPE_INT, dec->channels,
                                    "depth", G_TYPE_INT, dec->depth,
                                    "width", G_TYPE_INT, 32,
                                    "endianness", G_TYPE_INT, G_BYTE_ORDER,
                                    "signed", G_TYPE_BOOLEAN, TRUE, NULL);

        /* If we already have the channel layout set from upstream
         * take this */
        if (gst_structure_has_field (structure, "channel-positions")) {
            pos = gst_audio_get_channel_positions (structure);
            if (pos != NULL && dec->channels > 2) {
                GstStructure *new_str = gst_caps_get_structure (caps, 0);

                gst_audio_set_channel_positions (new_str, pos);
                dec->channel_mask =
                    gst_wavpack_get_channel_mask_from_positions (pos, dec->channels);
            }

            if (pos != NULL)
                g_free (pos);
        }

        GST_DEBUG_OBJECT (dec, "setting caps %" GST_PTR_FORMAT, caps);

        /* should always succeed */
        gst_pad_set_caps (dec->srcpad, caps);
        gst_caps_unref (caps);

        /* send GST_TAG_AUDIO_CODEC and GST_TAG_BITRATE tags before something
         * is decoded or after the format has changed */
        gst_wavpack_dec_post_tags (dec);
    }

    gst_object_unref (dec);

    return TRUE;
}
Example #17
static GstPadProbeReturn
buffer_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstMapInfo map;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
    GstCaps *caps;
    GstStructure *s;
    const GValue *sh;
    GArray *buffers;
    GstBuffer *buf;
    int i;
    gboolean found = FALSE;

    n_in_caps++;

    caps = gst_pad_get_current_caps (pad);
    s = gst_caps_get_structure (caps, 0);
    fail_unless (gst_structure_has_field (s, "streamheader"));
    sh = gst_structure_get_value (s, "streamheader");
    buffers = g_value_peek_pointer (sh);
    assert_equals_int (buffers->len, 3);

    for (i = 0; i < 3; ++i) {
      GValue *val;
      GstMapInfo map2;

      val = &g_array_index (buffers, GValue, i);
      buf = g_value_peek_pointer (val);
      fail_unless (GST_IS_BUFFER (buf));

      gst_buffer_map (buf, &map2, GST_MAP_READ);
      if (map2.size == map.size) {
        if (memcmp (map2.data, map.data, map.size) == 0) {
          found = TRUE;
        }
      }
      gst_buffer_unmap (buf, &map2);
    }
    fail_unless (found, "Did not find incoming HEADER buffer %p on caps",
        buffer);

    gst_caps_unref (caps);
  }
  gst_buffer_unmap (buffer, &map);

  return GST_PAD_PROBE_OK;
}
Example #18
// only on the sink
gboolean
gst_rtp_sv3v_depay_setcaps (GstBaseRTPDepayload * filter, GstCaps * caps)
{

  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint clock_rate = 90000;      // default

  if (gst_structure_has_field (structure, "clock-rate"))
    gst_structure_get_int (structure, "clock-rate", &clock_rate);

  filter->clock_rate = clock_rate;

  return TRUE;
}
Example #19
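/* A stream is considered already parsed if its caps structure carries
 * parsed=true, framed=true, or a codec_data field. */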
gboolean gst_goo_util_structure_is_parsed (GstStructure *structure) {
    if (gst_structure_has_field (structure, "parsed")) {
        gboolean parsed = FALSE;
        gst_structure_get_boolean (structure, "parsed", &parsed);
        if (parsed)
            return TRUE;
    }

    if (gst_structure_has_field (structure, "framed")) {
        gboolean framed = FALSE;
        gst_structure_get_boolean (structure, "framed", &framed);
        if (framed)
            return TRUE;
    }

    if (gst_structure_has_field (structure, "codec_data")) {
        const GValue *codec_data = NULL;
        codec_data = gst_structure_get_value (structure, "codec_data");
        if (codec_data != NULL)
            return TRUE;
    }

    return FALSE;
}
Example #20
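/* Returns TRUE if every field name present in st2 also exists in st1;
 * field values are not compared. */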
static gboolean
structure_is_subset (const GstStructure *st1, const GstStructure *st2)
{
        int i;

        for (i = 0; i < gst_structure_n_fields (st2); i++) {
                const gchar *name = gst_structure_nth_field_name (st2, i);

                if (!gst_structure_has_field(st1, name)) {
                        gupnp_dlna_debug ("    missing field %s", name);
                        return FALSE;
                }
        }

        return TRUE;
}
Example #21
static void
set_appsink_caps (GstElement * appsink, const GstCaps * caps,
    KmsRecordingProfile profile)
{
  GstStructure *str;
  GstCaps *sinkcaps;

  sinkcaps = gst_caps_copy (caps);

  str = gst_caps_get_structure (sinkcaps, 0);

  if (str == NULL) {
    GST_ERROR_OBJECT (appsink,
        "Can not get caps at index 0 from %" GST_PTR_FORMAT, sinkcaps);
    goto end;
  }

  if (!gst_structure_has_field (str, "framerate")) {
    GST_DEBUG_OBJECT (appsink, "No framerate in caps %" GST_PTR_FORMAT,
        sinkcaps);
  } else {
    GST_DEBUG_OBJECT (appsink, "Removing framerate from caps %" GST_PTR_FORMAT,
        sinkcaps);
    gst_structure_remove_field (str, "framerate");
  }

  switch (profile) {
    case KMS_RECORDING_PROFILE_WEBM:
    case KMS_RECORDING_PROFILE_WEBM_VIDEO_ONLY:
      /* Allow renegotiation of width and height because webmmux supports it */
      gst_structure_remove_field (str, "width");
      gst_structure_remove_field (str, "height");
      break;
    default:
      /* Do not allow height and width renegotiation */
      break;
  }

  GST_DEBUG_OBJECT (appsink, "Setting sink caps %" GST_PTR_FORMAT, sinkcaps);
  g_object_set (appsink, "caps", sinkcaps, NULL);

end:

  gst_caps_unref (sinkcaps);
}
Example #22
/* Clamp the framerate in a caps structure to a smaller range than
 * [1...max_rate], otherwise return FALSE */
static gboolean
gst_video_max_rate_clamp_structure (GstStructure * s, gint maxrate,
    gint * min_num, gint * min_denom, gint * max_num, gint * max_denom)
{
  gboolean ret = FALSE;

  if (!gst_structure_has_field (s, "framerate")) {
    /* No framerate field implies any framerate, clamping would result in
     * [1..max_rate] so not a real subset */
    goto out;
  } else {
    const GValue *v;
    GValue intersection = { 0, };
    GValue clamp = { 0, };
    gint tmp_num, tmp_denom;

    g_value_init (&clamp, GST_TYPE_FRACTION_RANGE);
    gst_value_set_fraction_range_full (&clamp, 0, 1, maxrate, 1);

    v = gst_structure_get_value (s, "framerate");
    ret = gst_value_intersect (&intersection, v, &clamp);
    g_value_unset (&clamp);

    if (!ret)
      goto out;

    gst_value_fraction_get_extremes (&intersection,
        min_num, min_denom, max_num, max_denom);

    gst_value_fraction_get_extremes (v,
        &tmp_num, &tmp_denom, max_num, max_denom);

    if (gst_util_fraction_compare (*max_num, *max_denom, maxrate, 1) > 0) {
      *max_num = maxrate;
      *max_denom = 1;
    }

    gst_structure_take_value (s, "framerate", &intersection);
  }

out:
  return ret;
}
Example #23
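/* Returns TRUE if the raw video caps structure describes a format with alpha:
 * RGB with an alpha_mask field, or the AYUV fourcc. */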
static gboolean
gst_ffmpegcsp_structure_is_alpha (GstStructure * s)
{
  const gchar *name;

  name = gst_structure_get_name (s);

  if (g_str_equal (name, "video/x-raw-rgb")) {
    return gst_structure_has_field (s, "alpha_mask");
  } else if (g_str_equal (name, "video/x-raw-yuv")) {
    guint32 fourcc;

    if (!gst_structure_get_fourcc (s, "format", &fourcc))
      return FALSE;

    return (fourcc == GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'));
  }

  return FALSE;
}
Example #24
/* For each raw video structure, adds a variant with format unset */
static gboolean
fix_video_caps_format(GstCapsFeatures *f, GstStructure *s, gpointer user_data)
{
    GstCaps *ret = GST_CAPS(user_data);
    OWR_UNUSED(f);

    gst_caps_append_structure(ret, gst_structure_copy(s));

    /* Don't mess with non-raw structures */
    if (!gst_structure_has_name(s, "video/x-raw"))
        goto done;

    if (gst_structure_has_field(s, "format")) {
        GstStructure *tmp = gst_structure_copy(s);
        gst_structure_remove_field(tmp, "format");
        gst_caps_append_structure(ret, tmp);
    }

done:
    return TRUE;
}
Example #25
static void
mpeg_video_parse_check_caps (guint version, guint8 * seq, gint size)
{
  GstCaps *caps;
  GstStructure *s;
  GstBuffer *buf;
  const GValue *val;
  GstMapInfo map;

  ctx_headers[0].data = seq;
  ctx_headers[0].size = size;
  /* parser does not really care that mpeg1 and mpeg2 frame data
   * should be a bit different */
  caps = gst_parser_test_get_output_caps (mpeg2_iframe, sizeof (mpeg2_iframe),
      NULL);
  fail_unless (caps != NULL);

  /* Check that the negotiated caps are as expected */
  /* When codec_data is present, parser assumes that data is version 4 */
  GST_LOG ("mpegvideo output caps: %" GST_PTR_FORMAT, caps);
  s = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_has_name (s, "video/mpeg"));
  fail_unless_structure_field_int_equals (s, "mpegversion", version);
  fail_unless_structure_field_int_equals (s, "width", 32);
  fail_unless_structure_field_int_equals (s, "height", 24);
  fail_unless (gst_structure_has_field (s, "codec_data"));

  /* check codec-data in more detail */
  val = gst_structure_get_value (s, "codec_data");
  fail_unless (val != NULL);
  buf = gst_value_get_buffer (val);
  fail_unless (buf != NULL);
  gst_buffer_map (buf, &map, GST_MAP_READ);
  /* codec-data = header - GOP */
  assert_equals_int (map.size, size - 8);
  fail_unless (memcmp (map.data, seq, map.size) == 0);
  gst_buffer_unmap (buf, &map);

  gst_caps_unref (caps);
}
Example #26
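/* For custom upstream events without a "payload" field, add the payload type
 * of the source pad the event was received on before forwarding it. */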
static gboolean
gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstRtpPtDemux *demux;
  const GstStructure *s;

  demux = GST_RTP_PT_DEMUX (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
    case GST_EVENT_CUSTOM_BOTH_OOB:
      s = gst_event_get_structure (event);
      if (s && !gst_structure_has_field (s, "payload")) {
        GSList *walk;

        GST_OBJECT_LOCK (demux);
        for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
          GstRtpPtDemuxPad *dpad = (GstRtpPtDemuxPad *) walk->data;

          if (dpad->pad == pad) {
            GstStructure *ws;

            event =
                GST_EVENT_CAST (gst_mini_object_make_writable
                (GST_MINI_OBJECT_CAST (event)));
            ws = gst_event_writable_structure (event);
            gst_structure_set (ws, "payload", G_TYPE_UINT, dpad->pt, NULL);
            break;
          }
        }
        GST_OBJECT_UNLOCK (demux);
      }
      break;
    default:
      break;
  }

  return gst_pad_event_default (pad, parent, event);
}
Example #27
/* return the caps that can be used on out_pad given in_caps on in_pad */
static gboolean
gst_video_rate_transformcaps (GstPad * in_pad, GstCaps * in_caps,
    GstPad * out_pad, GstCaps ** out_caps)
{
  GstCaps *intersect;
  const GstCaps *in_templ;
  gint i;
  GSList *extra_structures = NULL;
  GSList *iter;

  in_templ = gst_pad_get_pad_template_caps (in_pad);
  intersect = gst_caps_intersect (in_caps, in_templ);

  /* all possible framerates are allowed */
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *structure;

    structure = gst_caps_get_structure (intersect, i);

    if (gst_structure_has_field (structure, "framerate")) {
      GstStructure *copy_structure;

      copy_structure = gst_structure_copy (structure);
      gst_structure_set (copy_structure,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      extra_structures = g_slist_append (extra_structures, copy_structure);
    }
  }

  /* append the extra structures */
  for (iter = extra_structures; iter != NULL; iter = g_slist_next (iter)) {
    gst_caps_append_structure (intersect, (GstStructure *) iter->data);
  }
  g_slist_free (extra_structures);

  *out_caps = intersect;

  return TRUE;
}
Example #28
static gboolean
gst_rtp_ssrc_demux_src_event (GstPad * pad, GstEvent * event)
{
  GstRtpSsrcDemux *demux;
  const GstStructure *s;

  demux = GST_RTP_SSRC_DEMUX (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
    case GST_EVENT_CUSTOM_BOTH_OOB:
      s = gst_event_get_structure (event);
      if (s && !gst_structure_has_field (s, "ssrc")) {
        GSList *walk;

        for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
          GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data;

          if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) {
            event =
                GST_EVENT_CAST (gst_mini_object_make_writable
                (GST_MINI_OBJECT_CAST (event)));
            gst_structure_set (event->structure, "ssrc", G_TYPE_UINT,
                dpad->ssrc, NULL);
            break;
          }
        }
      }
      break;
    default:
      break;
  }

  gst_object_unref (demux);

  return gst_pad_event_default (pad, event);
}
Example #29
static void
gst_opus_dec_negotiate (GstOpusDec * dec, const GstAudioChannelPosition * pos)
{
  GstCaps *caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
  GstStructure *s;
  GstAudioInfo info;

  if (caps) {
    caps = gst_caps_truncate (caps);
    caps = gst_caps_make_writable (caps);
    s = gst_caps_get_structure (caps, 0);

    if (gst_structure_has_field (s, "rate"))
      gst_structure_fixate_field_nearest_int (s, "rate", dec->sample_rate);
    else
      gst_structure_set (s, "rate", G_TYPE_INT, dec->sample_rate, NULL);
    gst_structure_get_int (s, "rate", &dec->sample_rate);

    if (gst_structure_has_field (s, "channels"))
      gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
    else
      gst_structure_set (s, "channels", G_TYPE_INT, dec->n_channels, NULL);
    gst_structure_get_int (s, "channels", &dec->n_channels);

    gst_caps_unref (caps);
  }

  if (dec->n_channels == 0) {
    GST_DEBUG_OBJECT (dec, "Using a default of 2 channels");
    dec->n_channels = 2;
    pos = NULL;
  }

  if (dec->sample_rate == 0) {
    GST_DEBUG_OBJECT (dec, "Using a default of 48kHz sample rate");
    dec->sample_rate = 48000;
  }

  GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
      dec->sample_rate);

  /* pass valid order to audio info */
  if (pos) {
    memcpy (dec->opus_pos, pos, sizeof (pos[0]) * dec->n_channels);
    gst_audio_channel_positions_to_valid_order (dec->opus_pos, dec->n_channels);
  }

  /* set up source format */
  gst_audio_info_init (&info);
  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16,
      dec->sample_rate, dec->n_channels, pos ? dec->opus_pos : NULL);
  gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (dec), &info);

  /* but we still need the opus order for later reordering */
  if (pos) {
    memcpy (dec->opus_pos, pos, sizeof (pos[0]) * dec->n_channels);
    gst_audio_channel_positions_to_valid_order (dec->opus_pos, dec->n_channels);
  } else {
    dec->opus_pos[0] = GST_AUDIO_CHANNEL_POSITION_INVALID;
  }

  dec->info = info;
}
Example #30
static GstFlowReturn
mpegtsmux_create_streams (MpegTsMux * mux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GSList *walk = mux->collect->data;

  /* Create the streams */
  while (walk) {
    GstCollectData *c_data = (GstCollectData *) walk->data;
    MpegTsPadData *ts_data = (MpegTsPadData *) walk->data;
    gchar *name = NULL;

    walk = g_slist_next (walk);

    if (ts_data->prog_id == -1) {
      name = GST_PAD_NAME (c_data->pad);
      if (mux->prog_map != NULL && gst_structure_has_field (mux->prog_map,
              name)) {
        gint idx;
        gboolean ret = gst_structure_get_int (mux->prog_map, name, &idx);
        if (!ret) {
          GST_ELEMENT_ERROR (mux, STREAM, MUX,
              ("Reading program map failed. Assuming default"), (NULL));
          idx = DEFAULT_PROG_ID;
        }
        if (idx < 0 || idx >= MAX_PROG_NUMBER) {
          GST_DEBUG_OBJECT (mux, "Program number %d associated with pad %s out "
              "of range (max = %d); DEFAULT_PROGRAM = %d is used instead",
              idx, name, MAX_PROG_NUMBER, DEFAULT_PROG_ID);
          idx = DEFAULT_PROG_ID;
        }
        ts_data->prog_id = idx;
      } else {
        ts_data->prog_id = DEFAULT_PROG_ID;
      }
    }

    ts_data->prog = mux->programs[ts_data->prog_id];
    if (ts_data->prog == NULL) {
      ts_data->prog = tsmux_program_new (mux->tsmux);
      if (ts_data->prog == NULL)
        goto no_program;
      tsmux_set_pmt_interval (ts_data->prog, mux->pmt_interval);
      mux->programs[ts_data->prog_id] = ts_data->prog;
    }

    if (ts_data->stream == NULL) {
      ret = mpegtsmux_create_stream (mux, ts_data, c_data->pad);
      if (ret != GST_FLOW_OK)
        goto no_stream;
    }
  }

  return GST_FLOW_OK;
no_program:
  GST_ELEMENT_ERROR (mux, STREAM, MUX,
      ("Could not create new program"), (NULL));
  return GST_FLOW_ERROR;
no_stream:
  GST_ELEMENT_ERROR (mux, STREAM, MUX,
      ("Could not create handler for stream"), (NULL));
  return ret;
}