void
kms_pointer_detector_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  KmsPointerDetector *pointerdetector = KMS_POINTER_DETECTOR (object);

  GST_DEBUG_OBJECT (pointerdetector, "get_property");

  switch (property_id) {
    case PROP_NUM_REGIONS:
      g_value_set_int (value, pointerdetector->numOfRegions);
      break;
    case PROP_WINDOW_SCALE:
      g_value_set_int (value, pointerdetector->windowScaleRef);
      break;
    case PROP_SHOW_DEBUG_INFO:
      g_value_set_boolean (value, pointerdetector->show_debug_info);
      break;
    case PROP_WINDOWS_LAYOUT:
      if (pointerdetector->buttonsLayout == NULL) {
        pointerdetector->buttonsLayout = gst_structure_new_empty ("windows");
      }
      g_value_set_boxed (value, pointerdetector->buttonsLayout);
      break;
    case PROP_MESSAGE:
      g_value_set_boolean (value, pointerdetector->putMessage);
      break;
    case PROP_SHOW_WINDOWS_LAYOUT:
      g_value_set_boolean (value, pointerdetector->show_windows_layout);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
Example #2
static void
kms_image_overlay_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  KmsImageOverlay *imageoverlay = KMS_IMAGE_OVERLAY (object);

  GST_DEBUG_OBJECT (imageoverlay, "get_property");

  GST_OBJECT_LOCK (imageoverlay);

  switch (property_id) {
    case PROP_SHOW_DEBUG_INFO:
      g_value_set_boolean (value, imageoverlay->priv->show_debug_info);
      break;
    case PROP_IMAGE_TO_OVERLAY:
      if (imageoverlay->priv->image_to_overlay == NULL) {
        imageoverlay->priv->image_to_overlay =
            gst_structure_new_empty ("image_to_overlay");
      }
      g_value_set_boxed (value, imageoverlay->priv->image_to_overlay);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (imageoverlay);
}
Example #3
static GstStructure *
kms_element_stats_impl (KmsElement * self, gchar * selector)
{
  GstStructure *stats;

  stats = gst_structure_new_empty ("stats");

  if (self->priv->stats_enabled) {
    GstStructure *e_stats;

    /* Video and audio latencies are measured in nanoseconds. They are */
    /* such small values that there is no harm in casting them to      */
    /* uint64, even if we might lose a bit of precision.               */

    e_stats = gst_structure_new (KMS_ELEMENT_STATS_STRUCT_NAME,
        "input-video-latency", G_TYPE_UINT64, (guint64) self->priv->stats.vi,
        "input-audio-latency", G_TYPE_UINT64, (guint64) self->priv->stats.ai,
        NULL);

    gst_structure_set (stats, KMS_MEDIA_ELEMENT_FIELD, GST_TYPE_STRUCTURE,
        e_stats, NULL);

    gst_structure_free (e_stats);
  }

  return stats;
}
Example #4
/* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
  /* We are possibly in a GStreamer working thread, so we notify the main
   * thread of this event by posting a message on the bus */
  gst_element_post_message (playbin,
    gst_message_new_application (GST_OBJECT (playbin),
      gst_structure_new_empty ("tags-changed")));
}
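The application side of this pattern is not shown above; a minimal sketch of a main-thread bus watch that could pick up this message follows. The handle_message name and the analyze_streams() helper are assumptions borrowed from the surrounding tutorial, not part of the example.

/* Hypothetical bus-watch callback (runs in the main thread via a GLib main
 * loop): reacts to the application message posted by tags_cb above. */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_APPLICATION &&
      gst_message_has_name (msg, "tags-changed")) {
    /* Safe to update application state here: we are in the main thread */
    analyze_streams (data);  /* assumed helper from the tutorial */
  }
  return TRUE;
}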
Example #5
static GByteArray *
setup_test_variables (GstHlsDemuxTestInputData * inputTestData,
    GstAdaptiveDemuxTestExpectedOutput * outputTestData,
    GstHlsDemuxTestCase * hlsTestCase,
    GstAdaptiveDemuxTestCase * engineTestData, guint segment_size)
{
  GByteArray *mpeg_ts = NULL;

  if (segment_size) {
    mpeg_ts = generate_transport_stream (segment_size);
    fail_unless (mpeg_ts != NULL);
    for (guint itd = 0; inputTestData[itd].uri; ++itd) {
      if (g_str_has_suffix (inputTestData[itd].uri, ".ts")) {
        inputTestData[itd].payload = mpeg_ts->data;
      }
    }
    for (guint otd = 0; outputTestData[otd].name; ++otd) {
      outputTestData[otd].expected_data = mpeg_ts->data;
      engineTestData->output_streams =
          g_list_append (engineTestData->output_streams, &outputTestData[otd]);
    }
  }
  hlsTestCase->input = inputTestData;
  hlsTestCase->state = gst_structure_new_empty (__FUNCTION__);
  return mpeg_ts;
}
Example #6
/**
 * send_underrun_message
 *
 * Posts an UNDERRUN application message on the bus.
 */
static void send_underrun_message(ProgressBuffer* element)
{
    GstStructure *s = gst_structure_new_empty(PB_MESSAGE_UNDERRUN);
    GstMessage *msg = gst_message_new_application(GST_OBJECT(element), s);

    gst_element_post_message(GST_ELEMENT(element), msg);
}
Example #7
GstStructure *
gst_multiudpsink_get_stats (GstMultiUDPSink * sink, const gchar * host,
    gint port)
{
  GstUDPClient *client;
  GstStructure *result = NULL;
  GstUDPClient udpclient;
  GList *find;

  udpclient.host = (gchar *) host;
  udpclient.port = port;

  g_mutex_lock (&sink->client_lock);

  find = g_list_find_custom (sink->clients, &udpclient,
      (GCompareFunc) client_compare);
  if (!find)
    goto not_found;

  GST_DEBUG_OBJECT (sink, "stats for client with host %s, port %d", host, port);

  client = (GstUDPClient *) find->data;

  result = gst_structure_new_empty ("multiudpsink-stats");

  gst_structure_set (result,
      "bytes-sent", G_TYPE_UINT64, client->bytes_sent,
      "packets-sent", G_TYPE_UINT64, client->packets_sent,
      "connect-time", G_TYPE_UINT64, client->connect_time,
      "disconnect-time", G_TYPE_UINT64, client->disconnect_time, NULL);

  g_mutex_unlock (&sink->client_lock);

  return result;

  /* ERRORS */
not_found:
  {
    g_mutex_unlock (&sink->client_lock);
    GST_WARNING_OBJECT (sink, "client with host %s, port %d not found",
        host, port);
    /* Apparently (see comment in gstmultifdsink.c) returning NULL from here may
     * confuse/break python bindings */
    return gst_structure_new_empty ("multiudpsink-stats");
  }
}
static GstStructure *
get_structure_from_roi (std::shared_ptr<RegionOfInterest> roi)
{
  GstStructure *roiStructure, *configRoiSt;
  std::shared_ptr<RegionOfInterestConfig> config;
  int pointCount = 0;

  roiStructure = gst_structure_new_empty (roi->getId().c_str() );

  if (roiStructure == NULL) {
    throw KurentoException (MEDIA_OBJECT_ILLEGAL_PARAM_ERROR,
                            "Invalid roi name");
  }

  for (std::shared_ptr<RelativePoint> point : roi->getPoints() ) {
    GstStructure *pointSt;
    std::string name = "point" + std::to_string (pointCount ++);

    pointSt = gst_structure_new (name.c_str(),
                                 "x", G_TYPE_FLOAT, point->getX(),
                                 "y", G_TYPE_FLOAT, point->getY(),
                                 NULL);

    gst_structure_set (roiStructure,
                       name.c_str(), GST_TYPE_STRUCTURE, pointSt,
                       NULL);

    gst_structure_free (pointSt);
  }

  config = roi->getRegionOfInterestConfig();
  configRoiSt = gst_structure_new ("config",
                                   "id", G_TYPE_STRING, roi->getId().c_str(),
                                   "occupancy_level_min", G_TYPE_INT, config->getOccupancyLevelMin(),
                                   "occupancy_level_med", G_TYPE_INT, config->getOccupancyLevelMed(),
                                   "occupancy_level_max", G_TYPE_INT, config->getOccupancyLevelMax(),
                                   "occupancy_num_frames_to_event", G_TYPE_INT,
                                   config->getOccupancyNumFramesToEvent(),
                                   "fluidity_level_min", G_TYPE_INT, config->getFluidityLevelMin(),
                                   "fluidity_level_med", G_TYPE_INT, config->getFluidityLevelMed(),
                                   "fluidity_level_max", G_TYPE_INT, config->getFluidityLevelMax(),
                                   "fluidity_num_frames_to_event", G_TYPE_INT,
                                   config->getFluidityNumFramesToEvent(),
                                   "send_optical_flow_event", G_TYPE_BOOLEAN, config->getSendOpticalFlowEvent(),
                                   "optical_flow_num_frames_to_event", G_TYPE_INT,
                                   config->getOpticalFlowNumFramesToEvent(),
                                   "optical_flow_num_frames_to_reset", G_TYPE_INT,
                                   config->getOpticalFlowNumFramesToReset(),
                                   "optical_flow_angle_offset", G_TYPE_INT, config->getOpticalFlowAngleOffset(),
                                   NULL);
  gst_structure_set (roiStructure,
                     "config", GST_TYPE_STRUCTURE, configRoiSt,
                     NULL);

  gst_structure_free (configRoiSt);

  return roiStructure;
}
void
PointerDetectorFilter::clearWindows()
{
  GstStructure *buttonsLayout;

  buttonsLayout = gst_structure_new_empty ("buttonsLayout");
  g_object_set (G_OBJECT (this->pointerDetector), WINDOWS_LAYOUT, buttonsLayout, NULL);
  gst_structure_free (buttonsLayout);
}
Example #10
GstQuery *
gst_droid_query_new_video_color_format ()
{
  GstQuery *query;
  GstStructure *structure;

  structure = gst_structure_new_empty (GST_DROID_VIDEO_COLOR_FORMAT_QUERY_NAME);

  query = gst_query_new_custom (GST_QUERY_CUSTOM, structure);

  return query;
}
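A short counterpart sketch (not from the original source): an element receiving this custom query could recognise it by its structure name, roughly like this.

/* Hypothetical check for the color-format query built above */
static gboolean
is_video_color_format_query (GstQuery * query)
{
  const GstStructure *s;

  if (GST_QUERY_TYPE (query) != GST_QUERY_CUSTOM)
    return FALSE;

  s = gst_query_get_structure (query);

  return s != NULL &&
      gst_structure_has_name (s, GST_DROID_VIDEO_COLOR_FORMAT_QUERY_NAME);
}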
Example #11
static void
append_codec_to_array (GArray *array, const char *codec)
{
  GValue v = G_VALUE_INIT;
  GstStructure *s;

  g_value_init (&v, GST_TYPE_STRUCTURE);
  s = gst_structure_new_empty (codec);
  gst_value_set_structure (&v, s);
  gst_structure_free (s);
  g_array_append_val (array, v);
}
Example #12
/*
 * This function sends a dummy event to force a blocked pad probe to be called
 */
static void
send_dummy_event (GstPad * pad, const gchar * name)
{
  GstElement *parent = gst_pad_get_parent_element (pad);

  if (parent == NULL) {
    return;
  }

  if (GST_PAD_IS_SINK (pad)) {
    gst_pad_send_event (pad,
        gst_event_new_custom (GST_EVENT_TYPE_DOWNSTREAM |
            GST_EVENT_TYPE_SERIALIZED, gst_structure_new_empty (name)));
  } else {
    gst_pad_send_event (pad,
        gst_event_new_custom (GST_EVENT_TYPE_UPSTREAM |
            GST_EVENT_TYPE_SERIALIZED, gst_structure_new_empty (name)));
  }

  g_object_unref (parent);
}
Example #13
/**
 * gst_rtsp_token_new_empty:
 *
 * Create a new empty Authorization token.
 *
 * Returns: (transfer full): a new empty authorization token.
 */
GstRTSPToken *
gst_rtsp_token_new_empty (void)
{
  GstRTSPTokenImpl *token;
  GstStructure *s;

  s = gst_structure_new_empty ("GstRTSPToken");
  g_return_val_if_fail (s != NULL, NULL);

  token = g_slice_new0 (GstRTSPTokenImpl);
  gst_rtsp_token_init (token, s);

  return (GstRTSPToken *) token;
}
Example #14
static GstStructure *
kms_element_get_e2e_latency_stats (KmsRecorderEndpoint * self, gchar * selector)
{
  gpointer key, value;
  GHashTableIter iter;
  GstStructure *stats;

  stats = gst_structure_new_empty ("e2e-latencies");

  KMS_ELEMENT_LOCK (self);

  g_hash_table_iter_init (&iter, self->priv->stats.avg_e2e);

  while (g_hash_table_iter_next (&iter, &key, &value)) {
    StreamE2EAvgStat *avg = value;
    GstStructure *pad_latency;
    gchar *padname, *id = key;

    if (selector != NULL && ((g_strcmp0 (selector, AUDIO_STREAM_NAME) == 0 &&
                avg->type != KMS_MEDIA_TYPE_AUDIO) ||
            (g_strcmp0 (selector, VIDEO_STREAM_NAME) == 0 &&
                avg->type != KMS_MEDIA_TYPE_VIDEO))) {
      continue;
    }

    padname = kms_element_get_padname_from_id (self, id);

    if (padname == NULL) {
      GST_WARNING_OBJECT (self, "No pad identified by %s", id);
      continue;
    }

    /* Video and audio latencies are measured in nanoseconds. They are */
    /* such small values that there is no harm in casting them to      */
    /* uint64, even if we might lose a bit of precision.               */

    pad_latency = gst_structure_new (padname, "type", G_TYPE_STRING,
        (avg->type ==
            KMS_MEDIA_TYPE_AUDIO) ? AUDIO_STREAM_NAME : VIDEO_STREAM_NAME,
        "avg", G_TYPE_UINT64, (guint64) avg->avg, NULL);

    gst_structure_set (stats, padname, GST_TYPE_STRUCTURE, pad_latency, NULL);
    gst_structure_free (pad_latency);
    g_free (padname);
  }

  KMS_ELEMENT_UNLOCK (self);

  return stats;
}
Example #15
/**
 * gst_video_overlay_prepare_window_handle:
 * @overlay: a #GstVideoOverlay which does not yet have a window handle set
 *
 * This will post a "prepare-window-handle" element message on the bus
 * to give applications an opportunity to call
 * gst_video_overlay_set_window_handle() before a plugin creates its own
 * window.
 *
 * This function should only be used by video overlay plugin developers.
 */
void
gst_video_overlay_prepare_window_handle (GstVideoOverlay * overlay)
{
  GstStructure *s;
  GstMessage *msg;

  g_return_if_fail (overlay != NULL);
  g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));

  GST_LOG_OBJECT (GST_OBJECT (overlay), "prepare window handle");
  s = gst_structure_new_empty ("prepare-window-handle");
  msg = gst_message_new_element (GST_OBJECT (overlay), s);
  gst_element_post_message (GST_ELEMENT (overlay), msg);
}
Example #16
/**
 * gst_event_new_tag:
 * @taglist: (transfer full): metadata list. The event will take ownership
 *     of the taglist.
 *
 * Generates a metadata tag event from the given @taglist.
 *
 * The scope of the taglist specifies if the taglist applies to the
 * complete medium or only to this specific stream. As the tag event
 * is a sticky event, elements should merge tags received from
 * upstream with a given scope with their own tags with the same
 * scope and create a new tag event from it.
 *
 * Returns: (transfer full): a new #GstEvent
 */
GstEvent *
gst_event_new_tag (GstTagList * taglist)
{
  GstStructure *s;
  GValue val = G_VALUE_INIT;
  const gchar *names[] = { "GstTagList-stream", "GstTagList-global" };

  g_return_val_if_fail (taglist != NULL, NULL);

  s = gst_structure_new_empty (names[gst_tag_list_get_scope (taglist)]);
  g_value_init (&val, GST_TYPE_TAG_LIST);
  g_value_take_boxed (&val, taglist);
  gst_structure_id_take_value (s, GST_QUARK (TAGLIST), &val);
  return gst_event_new_custom (GST_EVENT_TAG, s);
}
Example #17
/**
 * gst_tracer_record_new:
 * @name: name of new record, must end on ".class".
 * @firstfield: name of first field to set
 * @...: additional arguments
 *
 * Create a new tracer record. The record instance can be used to efficiently
 * log entries using gst_tracer_record_log().
 *
 * The @name without the ".class" suffix will be used for the log records.
 * There must be fields for each value that gets logged where the field name is
 * the value name. The field must be a #GstStructure describing the value. The
 * sub structure must contain a field called 'type' of %G_TYPE_GTYPE that
 * contains the GType of the value. The resulting #GstTracerRecord will take
 * ownership of the field structures.
 *
 * The way to deal with optional values is to log an additional boolean before
 * the optional field which, if %TRUE, signals that the optional field is valid
 * and, if %FALSE, that it should be ignored. One must still log a placeholder
 * value for the optional field though. Please also note that pointer type
 * values must not be NULL - the underlying serialisation cannot handle that
 * right now.
 *
 * > Please note that this is still under discussion and subject to change.
 *
 * Returns: (transfer full): a new #GstTracerRecord
 */
GstTracerRecord *
gst_tracer_record_new (const gchar * name, const gchar * firstfield, ...)
{
  GstTracerRecord *self;
  GstStructure *structure;
  va_list varargs;
  gchar *err = NULL;
  GType type;
  GQuark id;

  va_start (varargs, firstfield);
  structure = gst_structure_new_empty (name);

  while (firstfield) {
    GValue val = { 0, };

    id = g_quark_from_string (firstfield);
    type = va_arg (varargs, GType);

    /* all fields passed here must be GstStructures which we take over */
    if (type != GST_TYPE_STRUCTURE) {
      GST_WARNING ("expected field of type GstStructure, but %s is %s",
          firstfield, g_type_name (type));
    }

    G_VALUE_COLLECT_INIT (&val, type, varargs, G_VALUE_NOCOPY_CONTENTS, &err);
    if (G_UNLIKELY (err)) {
      g_critical ("%s", err);
      break;
    }
    /* see boxed_proxy_collect_value */
    val.data[1].v_uint &= ~G_VALUE_NOCOPY_CONTENTS;
    gst_structure_id_take_value (structure, id, &val);

    firstfield = va_arg (varargs, gchar *);
  }
  va_end (varargs);

  self = g_object_new (GST_TYPE_TRACER_RECORD, NULL);

  /* Clear floating flag */
  gst_object_ref_sink (self);

  self->spec = structure;
  gst_tracer_record_build_format (self);

  return self;
}
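A usage sketch modelled on how the core tracers call this unstable tracer API (the record name and fields here are illustrative, not from the example above): every logged value is declared by a sub-structure whose 'type' field holds the value's GType, and gst_tracer_record_log () is then called with matching arguments.

static GstTracerRecord *tr_latency;  /* hypothetical record */

static void
setup_latency_record (void)
{
  tr_latency = gst_tracer_record_new ("latency.class",
      "src", GST_TYPE_STRUCTURE, gst_structure_new ("scope",
          "type", G_TYPE_GTYPE, G_TYPE_STRING, NULL),
      "time", GST_TYPE_STRUCTURE, gst_structure_new ("value",
          "type", G_TYPE_GTYPE, G_TYPE_UINT64, NULL),
      NULL);
}

/* later: gst_tracer_record_log (tr_latency, "sink:src", (guint64) latency_ns); */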
Example #18
static void
do_post_message_pre (GstStatsTracer * self, guint64 ts, GstElement * elem,
    GstMessage * msg)
{
  GstElementStats *stats = get_element_stats (self, elem);
  const GstStructure *msg_s = gst_message_get_structure (msg);
  GstStructure *s =
      msg_s ? (GstStructure *) msg_s : gst_structure_new_empty ("dummy");

  stats->last_ts = ts;
  /* FIXME: work out whether using NULL instead of a dummy struct would work */
  gst_tracer_record_log (tr_message, (guint64) (guintptr) g_thread_self (), ts,
      stats->index, GST_MESSAGE_TYPE_NAME (msg), s);
  if (s != msg_s)
    gst_structure_free (s);
}
Example #19
static void
gst_droidcamsrc_dev_shutter_callback (void *user)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));

  GST_DEBUG_OBJECT (src, "dev shutter callback");

  g_rec_mutex_lock (dev->lock);

  if (!dev->img->image_start_sent) {
    gst_droidcamsrc_post_message (src,
        gst_structure_new_empty (GST_DROIDCAMSRC_CAPTURE_START));
    dev->img->image_start_sent = TRUE;
  }

  g_rec_mutex_unlock (dev->lock);
}
static GstCaps *
gst_gdk_pixbuf_dec_get_capslist (GstCaps * filter)
{
  GSList *slist;
  GSList *slist0;
  GstCaps *capslist = NULL;
  GstCaps *return_caps = NULL;
  GstCaps *tmpl_caps;

  capslist = gst_caps_new_empty ();
  slist0 = gdk_pixbuf_get_formats ();

  for (slist = slist0; slist; slist = g_slist_next (slist)) {
    GdkPixbufFormat *pixbuf_format;
    char **mimetypes;
    char **mimetype;

    pixbuf_format = slist->data;
    mimetypes = gdk_pixbuf_format_get_mime_types (pixbuf_format);

    for (mimetype = mimetypes; *mimetype; mimetype++) {
      gst_caps_append_structure (capslist, gst_structure_new_empty (*mimetype));
    }
    g_strfreev (mimetypes);
  }
  g_slist_free (slist0);

  tmpl_caps =
      gst_static_caps_get (&gst_gdk_pixbuf_dec_sink_template.static_caps);
  return_caps = gst_caps_intersect (capslist, tmpl_caps);

  gst_caps_unref (tmpl_caps);
  gst_caps_unref (capslist);

  if (filter && return_caps) {
    GstCaps *temp;

    temp = gst_caps_intersect (return_caps, filter);
    gst_caps_unref (return_caps);
    return_caps = temp;
  }

  return return_caps;
}
Example #21
static void
kms_face_detector_send_event (KmsFaceDetector * facedetector,
                              GstVideoFrame * frame)
{
    GstStructure *faces;
    GstStructure *timestamp;
    GstEvent *e;
    gint i;

    faces = gst_structure_new_empty ("faces");

    timestamp = gst_structure_new ("time",
                                   "pts", G_TYPE_UINT64, GST_BUFFER_PTS (frame->buffer),
                                   "dts", G_TYPE_UINT64, GST_BUFFER_DTS (frame->buffer), NULL);
    gst_structure_set (faces, "timestamp", GST_TYPE_STRUCTURE, timestamp, NULL);
    gst_structure_free (timestamp);

    for (i = 0;
            i <
            (facedetector->priv->pFaceRectSeq ? facedetector->priv->
             pFaceRectSeq->total : 0); i++) {
        CvRect *r;
        GstStructure *face;
        gchar *id = NULL;

        r = (CvRect *) cvGetSeqElem (facedetector->priv->pFaceRectSeq, i);
        face = gst_structure_new ("face",
                                  "x", G_TYPE_UINT, (guint) (r->x * facedetector->priv->resize_factor),
                                  "y", G_TYPE_UINT, (guint) (r->y * facedetector->priv->resize_factor),
                                  "width", G_TYPE_UINT,
                                  (guint) (r->width * facedetector->priv->resize_factor), "height",
                                  G_TYPE_UINT, (guint) (r->height * facedetector->priv->resize_factor),
                                  NULL);

        id = g_strdup_printf ("%d", i);
        gst_structure_set (faces, id, GST_TYPE_STRUCTURE, face, NULL);
        gst_structure_free (face);
        g_free (id);
    }

    /* post a faces detected event to src pad */
    e = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, faces);
    gst_pad_push_event (facedetector->base.element.srcpad, e);
}
Example #22
static void
dec_counter (GstPlayRegion *PlayRegion)
{

  if (PlayRegion->prerolled)
    return;

  if (g_atomic_int_dec_and_test (&PlayRegion->counter)) {
    /* all probes blocked and no-more-pads signaled, post
     * message on the bus. */
    PlayRegion->prerolled = TRUE;

    g_print ("fire application message\n");

    gst_bus_post (PlayRegion->bus, gst_message_new_application (
          GST_OBJECT_CAST (PlayRegion->pipeline),
          gst_structure_new_empty ("ExPrerolled")));
  }
}
Example #23
GstStructure *
get_roi_structure (const gchar * id)
{
  int pointCount = 0;
  GstStructure *roiStructure, *configRoiSt;

  roiStructure = gst_structure_new_empty (id);
  for (pointCount = 0; pointCount < 4; pointCount++) {
    GstStructure *pointSt;
    gchar *name;

    name = g_strdup_printf ("point%d", pointCount);
    pointSt = gst_structure_new (name,
        "x", G_TYPE_INT, 10 + pointCount,
        "y", G_TYPE_INT, 10 + pointCount, NULL);
    gst_structure_set (roiStructure, name, GST_TYPE_STRUCTURE, pointSt, NULL);
    gst_structure_free (pointSt);
    g_free (name);
  }
  configRoiSt = gst_structure_new ("config",
      "id", G_TYPE_STRING, id,
      "occupancy_level_min", G_TYPE_INT, 10,
      "occupancy_level_med", G_TYPE_INT, 35,
      "occupancy_level_max", G_TYPE_INT, 65,
      "occupancy_num_frames_to_event", G_TYPE_INT,
      5,
      "fluidity_level_min", G_TYPE_INT, 10,
      "fluidity_level_med", G_TYPE_INT, 35,
      "fluidity_level_max", G_TYPE_INT, 65,
      "fluidity_num_frames_to_event", G_TYPE_INT, 5,
      "send_optical_flow_event", G_TYPE_BOOLEAN, FALSE,
      "optical_flow_num_frames_to_event", G_TYPE_INT,
      3,
      "optical_flow_num_frames_to_reset", G_TYPE_INT,
      3, "optical_flow_angle_offset", G_TYPE_INT, 0, NULL);
  gst_structure_set (roiStructure, "config", GST_TYPE_STRUCTURE, configRoiSt,
      NULL);
  gst_structure_free (configRoiSt);
  return roiStructure;
}
Example #24
static GstStructure *
create_rtxsenders (RtxSender * senders, guint senders_num)
{
  GstStructure *recv_pt_map =
      gst_structure_new_empty ("application/x-rtp-pt-map");

  for (gint i = 0; i < senders_num; ++i) {
    gchar *master_pt_str;
    gchar *master_caps_str;
    GstStructure *send_pt_map;

    senders[i].h = gst_harness_new ("rtprtxsend");
    senders[i].master_ssrc = 1234567 + i;
    senders[i].rtx_ssrc = 7654321 + i;
    senders[i].master_pt = 80 + i;
    senders[i].rtx_pt = 20 + i;
    senders[i].seqnum = i * 1000;
    senders[i].expected_rtx_packets = 0;

    master_pt_str = g_strdup_printf ("%u", senders[i].master_pt);
    master_caps_str = g_strdup_printf ("application/x-rtp, "
        "media = (string)video, payload = (int)%u, "
        "ssrc = (uint)%u, clock-rate = (int)90000, "
        "encoding-name = (string)RAW",
        senders[i].master_pt, senders[i].master_ssrc);

    send_pt_map = gst_structure_new ("application/x-rtp-pt-map",
        master_pt_str, G_TYPE_UINT, senders[i].rtx_pt, NULL);
    gst_structure_set (recv_pt_map,
        master_pt_str, G_TYPE_UINT, senders[i].rtx_pt, NULL);

    g_object_set (senders[i].h->element, "payload-type-map", send_pt_map, NULL);
    gst_harness_set_src_caps_str (senders[i].h, master_caps_str);

    gst_structure_free (send_pt_map);
    g_free (master_pt_str);
    g_free (master_caps_str);
  }
  return recv_pt_map;
}
CrowdDetectorFilterImpl::CrowdDetectorFilterImpl (const
    boost::property_tree::ptree &config,
    std::shared_ptr<MediaPipeline> mediaPipeline,
    const std::vector<std::shared_ptr<RegionOfInterest>> &rois)  : FilterImpl (
        config, std::dynamic_pointer_cast<MediaPipelineImpl>
        (mediaPipeline) )
{
  GstStructure *roisStructure;

  g_object_set (element, "filter-factory", "crowddetector", NULL);

  g_object_get (G_OBJECT (element), "filter", &crowdDetector, NULL);

  if (crowdDetector == NULL) {
    throw KurentoException (MEDIA_OBJECT_NOT_AVAILABLE,
                            "Media Object not available");
  }

  roisStructure = gst_structure_new_empty ("Rois");

  for (auto roi : rois) {
    GstStructure *roiStructureAux = get_structure_from_roi (roi);

    gst_structure_set (roisStructure,
                       roi->getId().c_str(), GST_TYPE_STRUCTURE,
                       roiStructureAux,
                       NULL);

    gst_structure_free (roiStructureAux);
  }

  g_object_set (G_OBJECT (crowdDetector), ROIS_PARAM, roisStructure, NULL);
  gst_structure_free (roisStructure);

  bus_handler_id = 0;
  // There is no need to reference crowddetector because its life cycle is the same as the filter life cycle
  g_object_unref (crowdDetector);
}
static void
kms_crowd_detector_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  KmsCrowdDetector *crowddetector = KMS_CROWD_DETECTOR (object);

  GST_DEBUG_OBJECT (crowddetector, "get_property");

  switch (property_id) {
    case PROP_SHOW_DEBUG_INFO:
      g_value_set_boolean (value, crowddetector->priv->show_debug_info);
      break;
    case PROP_ROIS:
      if (crowddetector->priv->rois == NULL) {
        crowddetector->priv->rois = gst_structure_new_empty ("rois");
      }
      g_value_set_boxed (value, crowddetector->priv->rois);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
static gboolean
gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
                          GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
{
    GstVideoScale *videoscale = GST_VIDEO_SCALE (filter);
    gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;

    if (!gst_util_fraction_multiply (in_info->width,
                                     in_info->height, in_info->par_n, in_info->par_d, &from_dar_n,
                                     &from_dar_d)) {
        from_dar_n = from_dar_d = -1;
    }

    if (!gst_util_fraction_multiply (out_info->width,
                                     out_info->height, out_info->par_n, out_info->par_d, &to_dar_n,
                                     &to_dar_d)) {
        to_dar_n = to_dar_d = -1;
    }

    videoscale->borders_w = videoscale->borders_h = 0;
    if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
        if (videoscale->add_borders) {
            gint n, d, to_h, to_w;

            if (from_dar_n != -1 && from_dar_d != -1
                    && gst_util_fraction_multiply (from_dar_n, from_dar_d,
                                                   out_info->par_d, out_info->par_n, &n, &d)) {
                to_h = gst_util_uint64_scale_int (out_info->width, d, n);
                if (to_h <= out_info->height) {
                    videoscale->borders_h = out_info->height - to_h;
                    videoscale->borders_w = 0;
                } else {
                    to_w = gst_util_uint64_scale_int (out_info->height, n, d);
                    g_assert (to_w <= out_info->width);
                    videoscale->borders_h = 0;
                    videoscale->borders_w = out_info->width - to_w;
                }
            } else {
                GST_WARNING_OBJECT (videoscale, "Can't calculate borders");
            }
        } else {
            GST_WARNING_OBJECT (videoscale, "Can't keep DAR!");
        }
    }

    if (in_info->width == out_info->width && in_info->height == out_info->height
            && videoscale->borders_w == 0 && videoscale->borders_h == 0) {
        gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
    } else {
        GstStructure *options;
        GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter, "setup videoscaling");
        gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);

        options = gst_structure_new_empty ("videoscale");

        switch (videoscale->method) {
        case GST_VIDEO_SCALE_NEAREST:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_NEAREST,
                               NULL);
            break;
        case GST_VIDEO_SCALE_BILINEAR:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_LINEAR,
                               GST_VIDEO_RESAMPLER_OPT_MAX_TAPS, G_TYPE_INT, 2, NULL);
            break;
        case GST_VIDEO_SCALE_4TAP:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_SINC,
                               GST_VIDEO_RESAMPLER_OPT_MAX_TAPS, G_TYPE_INT, 4, NULL);
            break;
        case GST_VIDEO_SCALE_LANCZOS:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_LANCZOS,
                               NULL);
            break;
        case GST_VIDEO_SCALE_BILINEAR2:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_LINEAR,
                               NULL);
            break;
        case GST_VIDEO_SCALE_SINC:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_SINC,
                               NULL);
            break;
        case GST_VIDEO_SCALE_HERMITE:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 0.0,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.0,
                               NULL);
            break;
        case GST_VIDEO_SCALE_SPLINE:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 1.0,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.0,
                               NULL);
            break;
        case GST_VIDEO_SCALE_CATROM:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 0.0,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.5,
                               NULL);
            break;
        case GST_VIDEO_SCALE_MITCHELL:
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
                               GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 1.0 / 3.0,
                               GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 1.0 / 3.0,
                               NULL);
            break;
        }
        gst_structure_set (options,
                           GST_VIDEO_RESAMPLER_OPT_ENVELOPE, G_TYPE_DOUBLE, videoscale->envelope,
                           GST_VIDEO_RESAMPLER_OPT_SHARPNESS, G_TYPE_DOUBLE, videoscale->sharpness,
                           GST_VIDEO_RESAMPLER_OPT_SHARPEN, G_TYPE_DOUBLE, videoscale->sharpen,
                           GST_VIDEO_CONVERTER_OPT_DEST_X, G_TYPE_INT, videoscale->borders_w / 2,
                           GST_VIDEO_CONVERTER_OPT_DEST_Y, G_TYPE_INT, videoscale->borders_h / 2,
                           GST_VIDEO_CONVERTER_OPT_DEST_WIDTH, G_TYPE_INT,
                           out_info->width - videoscale->borders_w,
                           GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT, G_TYPE_INT,
                           out_info->height - videoscale->borders_h,
                           GST_VIDEO_CONVERTER_OPT_MATRIX_MODE, GST_TYPE_VIDEO_MATRIX_MODE,
                           GST_VIDEO_MATRIX_MODE_NONE, GST_VIDEO_CONVERTER_OPT_DITHER_METHOD,
                           GST_TYPE_VIDEO_DITHER_METHOD, GST_VIDEO_DITHER_NONE,
                           GST_VIDEO_CONVERTER_OPT_CHROMA_MODE, GST_TYPE_VIDEO_CHROMA_MODE,
                           GST_VIDEO_CHROMA_MODE_NONE, NULL);

        if (videoscale->gamma_decode) {
            gst_structure_set (options,
                               GST_VIDEO_CONVERTER_OPT_GAMMA_MODE, GST_TYPE_VIDEO_GAMMA_MODE,
                               GST_VIDEO_GAMMA_MODE_REMAP, NULL);
        }

        if (videoscale->convert)
            gst_video_converter_free (videoscale->convert);
        videoscale->convert = gst_video_converter_new (in_info, out_info, options);
    }

    GST_DEBUG_OBJECT (videoscale, "from=%dx%d (par=%d/%d dar=%d/%d), size %"
                      G_GSIZE_FORMAT " -> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), "
                      "size %" G_GSIZE_FORMAT,
                      in_info->width, in_info->height, in_info->par_n, in_info->par_d,
                      from_dar_n, from_dar_d, in_info->size, out_info->width,
                      out_info->height, out_info->par_n, out_info->par_d, to_dar_n, to_dar_d,
                      videoscale->borders_w, videoscale->borders_h, out_info->size);

    return TRUE;
}
/* Probe on the output of a parser chain (the last
 * src pad) */
static GstPadProbeReturn
parse_chain_output_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinInputStream * input)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  if (GST_IS_EVENT (GST_PAD_PROBE_INFO_DATA (info))) {
    GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

    GST_DEBUG_OBJECT (pad, "Got event %s", GST_EVENT_TYPE_NAME (ev));
    switch (GST_EVENT_TYPE (ev)) {
      case GST_EVENT_STREAM_START:
      {
        GstStream *stream = NULL;
        guint group_id = G_MAXUINT32;
        gst_event_parse_group_id (ev, &group_id);
        GST_DEBUG_OBJECT (pad, "Got stream-start, group_id:%d, input %p",
            group_id, input->input);
        if (set_input_group_id (input->input, &group_id)) {
          ev = gst_event_make_writable (ev);
          gst_event_set_group_id (ev, group_id);
          GST_PAD_PROBE_INFO_DATA (info) = ev;
        }
        input->saw_eos = FALSE;

        gst_event_parse_stream (ev, &stream);
        /* FIXME : Would we ever end up with a stream already set on the input ?? */
        if (stream) {
          if (input->active_stream != stream) {
            MultiQueueSlot *slot;
            if (input->active_stream)
              gst_object_unref (input->active_stream);
            input->active_stream = stream;
            /* We have the beginning of a stream, get a multiqueue slot and link to it */
            g_mutex_lock (&input->dbin->selection_lock);
            slot = get_slot_for_input (input->dbin, input);
            link_input_to_slot (input, slot);
            g_mutex_unlock (&input->dbin->selection_lock);
          } else
            gst_object_unref (stream);
        }
      }
        break;
      case GST_EVENT_CAPS:
      {
        GstCaps *caps = NULL;
        gst_event_parse_caps (ev, &caps);
        GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
        if (caps && input->active_stream)
          gst_stream_set_caps (input->active_stream, caps);
      }
        break;
      case GST_EVENT_EOS:
        input->saw_eos = TRUE;
        if (all_inputs_are_eos (input->dbin)) {
          GST_DEBUG_OBJECT (pad, "real input pad, marking as EOS");
          check_all_streams_for_eos (input->dbin);
        } else {
          GstPad *peer = gst_pad_get_peer (input->srcpad);
          if (peer) {
            /* Send custom-eos event to multiqueue slot */
            GstStructure *s;
            GstEvent *event;

            GST_DEBUG_OBJECT (pad,
                "Got EOS end of input stream, post custom-eos");
            s = gst_structure_new_empty ("decodebin3-custom-eos");
            event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
            gst_pad_send_event (peer, event);
            gst_object_unref (peer);
          } else {
            GST_FIXME_OBJECT (pad, "No peer, what should we do ?");
          }
        }
        ret = GST_PAD_PROBE_DROP;
        break;
      case GST_EVENT_FLUSH_STOP:
        GST_DEBUG_OBJECT (pad, "Clear saw_eos flag");
        input->saw_eos = FALSE;
      default:
        break;
    }
  } else if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
    GstQuery *q = GST_PAD_PROBE_INFO_QUERY (info);
    GST_DEBUG_OBJECT (pad, "Seeing query %s", GST_QUERY_TYPE_NAME (q));
    /* If we have a parser, we want to reply to the caps query */
    /* FIXME: Set a flag when the input stream is created for
     * streams where we shouldn't reply to these queries */
    if (GST_QUERY_TYPE (q) == GST_QUERY_CAPS
        && (info->type & GST_PAD_PROBE_TYPE_PULL)) {
      GstCaps *filter = NULL;
      GstCaps *allowed;
      gst_query_parse_caps (q, &filter);
      allowed = get_parser_caps_filter (input->dbin, filter);
      GST_DEBUG_OBJECT (pad,
          "Intercepting caps query, setting %" GST_PTR_FORMAT, allowed);
      gst_query_set_caps_result (q, allowed);
      gst_caps_unref (allowed);
      ret = GST_PAD_PROBE_HANDLED;
    } else if (GST_QUERY_TYPE (q) == GST_QUERY_ACCEPT_CAPS) {
      GstCaps *prop = NULL;
      gst_query_parse_accept_caps (q, &prop);
      /* Fast check against target caps */
      if (gst_caps_can_intersect (prop, input->dbin->caps))
        gst_query_set_accept_caps_result (q, TRUE);
      else {
        gboolean accepted = check_parser_caps_filter (input->dbin, prop);
        /* check against caps filter */
        gst_query_set_accept_caps_result (q, accepted);
        GST_DEBUG_OBJECT (pad, "ACCEPT_CAPS query, returning %d", accepted);
      }
      ret = GST_PAD_PROBE_HANDLED;
    }
  }

  return ret;
}
Example #29
static gboolean
gst_uri_downloader_set_uri (GstUriDownloader * downloader, const gchar * uri,
    const gchar * referer, gboolean compress, gboolean refresh,
    gboolean allow_cache)
{
  GstPad *pad;
  GObjectClass *gobject_class;

  if (!gst_uri_is_valid (uri))
    return FALSE;

  if (downloader->priv->urisrc) {
    gchar *old_protocol, *new_protocol;
    gchar *old_uri;

    old_uri =
        gst_uri_handler_get_uri (GST_URI_HANDLER (downloader->priv->urisrc));
    old_protocol = gst_uri_get_protocol (old_uri);
    new_protocol = gst_uri_get_protocol (uri);

    if (!g_str_equal (old_protocol, new_protocol)) {
      gst_element_set_state (downloader->priv->urisrc, GST_STATE_NULL);
      gst_object_unref (downloader->priv->urisrc);
      downloader->priv->urisrc = NULL;
      GST_DEBUG_OBJECT (downloader, "Can't re-use old source element");
    } else {
      GError *err = NULL;

      GST_DEBUG_OBJECT (downloader, "Re-using old source element");
      if (!gst_uri_handler_set_uri (GST_URI_HANDLER (downloader->priv->urisrc),
              uri, &err)) {
        GST_DEBUG_OBJECT (downloader, "Failed to re-use old source element: %s",
            err->message);
        g_clear_error (&err);
        gst_element_set_state (downloader->priv->urisrc, GST_STATE_NULL);
        gst_object_unref (downloader->priv->urisrc);
        downloader->priv->urisrc = NULL;
      }
    }
    g_free (old_uri);
    g_free (old_protocol);
    g_free (new_protocol);
  }

  if (!downloader->priv->urisrc) {
    GST_DEBUG_OBJECT (downloader, "Creating source element for the URI:%s",
        uri);
    downloader->priv->urisrc =
        gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
    if (!downloader->priv->urisrc)
      return FALSE;
  }

  gobject_class = G_OBJECT_GET_CLASS (downloader->priv->urisrc);
  if (g_object_class_find_property (gobject_class, "compress"))
    g_object_set (downloader->priv->urisrc, "compress", compress, NULL);
  if (g_object_class_find_property (gobject_class, "keep-alive"))
    g_object_set (downloader->priv->urisrc, "keep-alive", TRUE, NULL);
  if (g_object_class_find_property (gobject_class, "extra-headers")) {
    if (referer || refresh || !allow_cache) {
      GstStructure *extra_headers = gst_structure_new_empty ("headers");

      if (referer)
        gst_structure_set (extra_headers, "Referer", G_TYPE_STRING, referer,
            NULL);

      if (!allow_cache)
        gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
            "no-cache", NULL);
      else if (refresh)
        gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
            "max-age=0", NULL);

      g_object_set (downloader->priv->urisrc, "extra-headers", extra_headers,
          NULL);

      gst_structure_free (extra_headers);
    } else {
      g_object_set (downloader->priv->urisrc, "extra-headers", NULL, NULL);
    }
  }

  /* add a sync handler for the bus messages to detect errors in the download */
  gst_element_set_bus (GST_ELEMENT (downloader->priv->urisrc),
      downloader->priv->bus);
  gst_bus_set_sync_handler (downloader->priv->bus,
      gst_uri_downloader_bus_handler, downloader, NULL);

  pad = gst_element_get_static_pad (downloader->priv->urisrc, "src");
  if (!pad)
    return FALSE;
  gst_pad_link (pad, downloader->priv->pad);
  gst_object_unref (pad);
  return TRUE;
}
Example #30
static GstCaps *
gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *template_caps;
  GstCaps *allowed_caps;
  GstCaps *caps, *icaps;
  gboolean append_unrestricted;
  guint i;

  allowed_caps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  if (allowed_caps == NULL)
    return NULL;

  template_caps =
      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);

  if (gst_caps_is_any (allowed_caps)) {
    caps = gst_caps_ref (template_caps);
    goto done;
  }

  if (gst_caps_is_empty (allowed_caps)) {
    caps = gst_caps_ref (allowed_caps);
    goto done;
  }

  caps = gst_caps_new_empty ();

  append_unrestricted = FALSE;
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
    GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
    const gchar *profile_level_id;

    profile_level_id = gst_structure_get_string (s, "profile-level-id");

    if (profile_level_id && strlen (profile_level_id) == 6) {
      const gchar *profile;
      const gchar *level;
      long int spsint;
      guint8 sps[3];

      spsint = strtol (profile_level_id, NULL, 16);
      sps[0] = spsint >> 16;
      sps[1] = spsint >> 8;
      sps[2] = spsint;

      profile = gst_codec_utils_h264_get_profile (sps, 3);
      level = gst_codec_utils_h264_get_level (sps, 3);

      if (profile && level) {
        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
            profile, level);

        if (!strcmp (profile, "constrained-baseline"))
          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
        else {
          GValue val = { 0, };
          GValue profiles = { 0, };

          g_value_init (&profiles, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          g_value_set_static_string (&val, profile);
          gst_value_list_append_value (&profiles, &val);

          g_value_set_static_string (&val, "constrained-baseline");
          gst_value_list_append_value (&profiles, &val);

          gst_structure_take_value (new_s, "profile", &profiles);
        }

        if (!strcmp (level, "1"))
          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
        else {
          GValue levels = { 0, };
          GValue val = { 0, };
          int j;

          g_value_init (&levels, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
            g_value_set_static_string (&val, all_levels[j]);
            gst_value_list_prepend_value (&levels, &val);
            if (!strcmp (level, all_levels[j]))
              break;
          }
          gst_structure_take_value (new_s, "level", &levels);
        }
      } else {
        /* Invalid profile-level-id means baseline */

        gst_structure_set (new_s,
            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      }
    } else {
      /* No profile-level-id means baseline or unrestricted */

      gst_structure_set (new_s,
          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      append_unrestricted = TRUE;
    }

    caps = gst_caps_merge_structure (caps, new_s);
  }