Example #1
static gboolean
gst_dvdlpcmdec_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  gboolean res = TRUE;
  GstDvdLpcmDec *dvdlpcmdec;

  g_return_val_if_fail (caps != NULL, FALSE);
  g_return_val_if_fail (pad != NULL, FALSE);

  dvdlpcmdec = GST_DVDLPCMDEC (gst_pad_get_parent (pad));

  structure = gst_caps_get_structure (caps, 0);

  /* If we have the DVD structured LPCM (including header) then we wait
   * for incoming data before creating the output pad caps */
  if (gst_structure_has_name (structure, "audio/x-private1-lpcm")) {
    gst_pad_set_chain_function (dvdlpcmdec->sinkpad, gst_dvdlpcmdec_chain_dvd);
    goto done;
  }

  gst_pad_set_chain_function (dvdlpcmdec->sinkpad, gst_dvdlpcmdec_chain_raw);

  res &= gst_structure_get_int (structure, "rate", &dvdlpcmdec->rate);
  res &= gst_structure_get_int (structure, "channels", &dvdlpcmdec->channels);
  res &= gst_structure_get_int (structure, "width", &dvdlpcmdec->width);
  res &= gst_structure_get_int (structure, "dynamic_range",
      &dvdlpcmdec->dynamic_range);
  res &= gst_structure_get_boolean (structure, "emphasis",
      &dvdlpcmdec->emphasis);
  res &= gst_structure_get_boolean (structure, "mute", &dvdlpcmdec->mute);

  if (!res)
    goto caps_parse_error;

  /* Output width is the input width rounded up to the nearest byte */
  if (dvdlpcmdec->width == 20)
    dvdlpcmdec->out_width = 24;
  else
    dvdlpcmdec->out_width = dvdlpcmdec->width;

  res = gst_dvdlpcmdec_set_outcaps (dvdlpcmdec);

done:
  gst_object_unref (dvdlpcmdec);
  return res;

  /* ERRORS */
caps_parse_error:
  {
    GST_DEBUG_OBJECT (dvdlpcmdec, "Couldn't get parameters; missing caps?");
    gst_object_unref (dvdlpcmdec);
    return FALSE;
  }
}
Example #2
static gboolean
gst_alsasink_acceptcaps (GstPad * pad, GstCaps * caps)
{
    GstAlsaSink *alsa = GST_ALSA_SINK (gst_pad_get_parent_element (pad));
    GstCaps *pad_caps;
    GstStructure *st;
    gboolean ret = FALSE;
    GstRingBufferSpec spec = { 0 };

    pad_caps = gst_pad_get_caps_reffed (pad);
    if (pad_caps) {
        ret = gst_caps_can_intersect (pad_caps, caps);
        gst_caps_unref (pad_caps);
        if (!ret)
            goto done;
    }

    /* If we've not got fixed caps, creating a stream might fail, so let's just
     * return from here with default acceptcaps behaviour */
    if (!gst_caps_is_fixed (caps))
        goto done;

    /* parse helper expects this set, so avoid nasty warning
     * will be set properly later on anyway  */
    spec.latency_time = GST_SECOND;
    if (!gst_ring_buffer_parse_caps (&spec, caps))
        goto done;

    /* Make sure input is framed (one frame per buffer) and can be payloaded */
    switch (spec.type) {
    case GST_BUFTYPE_AC3:
    case GST_BUFTYPE_EAC3:
    case GST_BUFTYPE_DTS:
    case GST_BUFTYPE_MPEG:
    {
        gboolean framed = FALSE, parsed = FALSE;
        st = gst_caps_get_structure (caps, 0);

        gst_structure_get_boolean (st, "framed", &framed);
        gst_structure_get_boolean (st, "parsed", &parsed);
        if ((!framed && !parsed) || gst_audio_iec61937_frame_size (&spec) <= 0)
            goto done;
        break;
    }
    default:
        break;
    }
    ret = TRUE;

done:
    gst_caps_replace (&spec.caps, NULL);
    gst_object_unref (alsa);
    return ret;
}
Example #3
static void format_check(struct ausrc_st *st, GstStructure *s)
{
	int rate = 0, channels = 0, width = 0;
	gboolean sign = FALSE;

	if (!st || !s)
		return;

	/* the getters leave their outputs untouched on failure, so keep
	 * the variables initialized */
	gst_structure_get_int(s, "rate", &rate);
	gst_structure_get_int(s, "channels", &channels);
	gst_structure_get_int(s, "width", &width);
	gst_structure_get_boolean(s, "signed", &sign);

	if ((int)st->prm.srate != rate) {
		warning("gst: expected %u Hz (got %d Hz)\n", st->prm.srate,
			rate);
	}
	if (st->prm.ch != channels) {
		warning("gst: expected %d channels (got %d)\n",
			st->prm.ch, channels);
	}
	if (16 != width) {
		warning("gst: expected 16-bit width (got %d)\n", width);
	}
	if (!sign) {
		warning("gst: expected signed 16-bit format\n");
	}
}
Example #4
void
ges_base_xml_formatter_set_timeline_properties (GESBaseXmlFormatter * self,
    GESTimeline * timeline, const gchar * properties, const gchar * metadatas)
{
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);
  gboolean auto_transition = FALSE;

  if (properties) {
    GstStructure *props = gst_structure_from_string (properties, NULL);

    if (props) {
      if (gst_structure_get_boolean (props, "auto-transition",
              &auto_transition))
        gst_structure_remove_field (props, "auto-transition");

      gst_structure_foreach (props,
          (GstStructureForeachFunc) set_property_foreach, timeline);
      gst_structure_free (props);
    }
  }

  if (metadatas) {
    ges_meta_container_add_metas_from_string (GES_META_CONTAINER (timeline),
        metadatas);
  }

  priv->timeline_auto_transition = auto_transition;
}
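The properties argument is a serialized GstStructure, so a caller can build it by hand or with gst_structure_to_string(). A minimal sketch of a hypothetical call site (the property names are illustrative, not mandated by the formatter):

static void
apply_timeline_properties (GESBaseXmlFormatter * self, GESTimeline * timeline)
{
  /* "auto-transition" is consumed by the formatter itself; any remaining
   * fields are set as GObject properties on the timeline */
  const gchar *props =
      "properties, auto-transition=(boolean)TRUE, snapping-distance=(guint64)0";

  ges_base_xml_formatter_set_timeline_properties (self, timeline, props, NULL);
}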
Example #5

void
gst_mixer_message_parse_record_toggled (GstMessage * message,
    GstMixerTrack ** track, gboolean * record)
{
  const GstStructure *s;

  g_return_if_fail (gst_mixer_message_is_mixer_message (message));
  g_return_if_fail (GST_MIXER_MESSAGE_HAS_TYPE (message, RECORD_TOGGLED));

  s = gst_message_get_structure (message);

  if (track) {
    const GValue *v = gst_structure_get_value (s, "track");

    g_return_if_fail (v != NULL);
    *track = (GstMixerTrack *) g_value_get_object (v);
    g_return_if_fail (GST_IS_MIXER_TRACK (*track));
  }

  if (record) {
    gboolean ok = gst_structure_get_boolean (s, "record", record);

    /* keep the call outside g_return_if_fail(): the macro (and any side
     * effects inside it) is compiled out when G_DISABLE_CHECKS is defined */
    g_return_if_fail (ok);
  }
}
Example #6
gboolean
gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps)
{

  if (!gst_video_format_parse_caps (caps, &state->format,
          &state->width, &state->height))
    return FALSE;

  if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d))
    return FALSE;

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  {
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    state->interlaced = FALSE;
    gst_structure_get_boolean (structure, "interlaced", &state->interlaced);
  }

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  /* FIXME need better error handling */
  return TRUE;
}
Example #7
static gboolean gst_gcs_sink_event(GstPad *pad, GstEvent * event)
{
  GstGcs *gcs = GST_GCS (gst_pad_get_parent( pad ));
  gboolean ret = FALSE, facefound = FALSE;
  double x = 0, y = 0, w = 0, h = 0;

  switch (GST_EVENT_TYPE(event)) {
  case GST_EVENT_CUSTOM_DOWNSTREAM:
    if (gst_event_has_name(event, "facelocation")) {
      const GstStructure* str = gst_event_get_structure(event);
      gboolean ok = TRUE;

      /* the getters return FALSE and leave the output untouched on failure */
      ok = gst_structure_get_double(str, "x", &x) && ok;
      ok = gst_structure_get_double(str, "y", &y) && ok;
      ok = gst_structure_get_double(str, "width", &w) && ok;
      ok = gst_structure_get_double(str, "height", &h) && ok;
      ok = gst_structure_get_boolean(str, "facefound", &facefound) && ok;

      if (ok) {
        gcs->facepos.x = (int)x;
        gcs->facepos.y = (int)y;
        gcs->facepos.width = (int)w;
        gcs->facepos.height = (int)h;
        gcs->facefound      = facefound;
      }

      gst_event_unref(event);
      ret = TRUE;
    }
    break;
  case GST_EVENT_EOS:
    GST_INFO("Received EOS");
    /* fall through */
  default:
    ret = gst_pad_event_default(pad, event);
  }
  
  gst_object_unref(gcs);
  return ret;
}
Example #8
/* returns format info structure, will return NULL for dynamic media types! */
static const FormatInfo *
find_format_info (const GstCaps * caps)
{
  const GstStructure *s;
  const gchar *media_type;
  guint i;

  s = gst_caps_get_structure (caps, 0);
  media_type = gst_structure_get_name (s);

  for (i = 0; i < G_N_ELEMENTS (formats); ++i) {
    if (strcmp (media_type, formats[i].type) == 0) {
      gboolean is_sys = FALSE;

      if ((formats[i].flags & FLAG_SYSTEMSTREAM) == 0)
        return &formats[i];

      /* this record should only be matched if the systemstream field is set */
      if (gst_structure_get_boolean (s, "systemstream", &is_sys) && is_sys)
        return &formats[i];
    }
  }

  return NULL;
}
Example #9
static gboolean
gst_rtp_dtmf_src_handle_dtmf_event (GstRTPDTMFSrc * dtmfsrc,
    const GstStructure * event_structure)
{
  gint event_type;
  gboolean start;
  gint method;
  GstClockTime last_stop;
  gint event_number;
  gint event_volume;
  gboolean correct_order;

  if (!gst_structure_get_int (event_structure, "type", &event_type) ||
      !gst_structure_get_boolean (event_structure, "start", &start) ||
      event_type != GST_RTP_DTMF_TYPE_EVENT)
    goto failure;

  if (gst_structure_get_int (event_structure, "method", &method)) {
    if (method != 1) {
      goto failure;
    }
  }

  if (start)
    if (!gst_structure_get_int (event_structure, "number", &event_number) ||
        !gst_structure_get_int (event_structure, "volume", &event_volume))
      goto failure;

  GST_OBJECT_LOCK (dtmfsrc);
  if (gst_structure_get_clock_time (event_structure, "last-stop", &last_stop))
    dtmfsrc->last_stop = last_stop;
  else
    dtmfsrc->last_stop = GST_CLOCK_TIME_NONE;
  correct_order = (start != dtmfsrc->last_event_was_start);
  dtmfsrc->last_event_was_start = start;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (!correct_order)
    goto failure;

  if (start) {
    if (!gst_structure_get_int (event_structure, "number", &event_number) ||
        !gst_structure_get_int (event_structure, "volume", &event_volume))
      goto failure;

    GST_DEBUG_OBJECT (dtmfsrc, "Received start event %d with volume %d",
        event_number, event_volume);
    gst_rtp_dtmf_src_add_start_event (dtmfsrc, event_number, event_volume);
  }

  else {
    GST_DEBUG_OBJECT (dtmfsrc, "Received stop event");
    gst_rtp_dtmf_src_add_stop_event (dtmfsrc);
  }

  return TRUE;
failure:
  return FALSE;
}
Example #10
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *output_caps, *allowed_caps, *src_caps;
  gboolean res;

  /* extract interlaced flag */
  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  structure = gst_caps_get_structure (allowed_caps, 0);
  output_caps = gst_vdp_video_to_output_caps (caps);

  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_truncate (src_caps);

  GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT
      " src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps);

  gst_caps_unref (output_caps);
  gst_caps_unref (allowed_caps);

  if (gst_caps_is_empty (src_caps)) {
    gst_caps_unref (src_caps);
    goto invalid_caps;
  }

  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    structure = gst_caps_get_structure (src_caps, 0);

    if (!gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_caps_unref (src_caps);
      goto invalid_caps;
    }

    gst_fraction_double (&fps_n, &fps_d);
    gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
        NULL);
    gst_structure_remove_field (structure, "interlaced");

    vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
  }

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);

done:
  gst_object_unref (vpp);
  return res;

invalid_caps:
  GST_ERROR_OBJECT (vpp, "invalid caps: %" GST_PTR_FORMAT, caps);
  res = FALSE;
  goto done;
}
Example #11
/**
 * pk_gst_structure_to_provide:
 **/
static gchar *
pk_gst_structure_to_provide (GstStructure *s)
{
    GString *string;
    guint i, num_fields;
    GList *l;
    _cleanup_list_free_ GList *fields = NULL;

    num_fields = gst_structure_n_fields (s);

    for (i = 0; i < num_fields; i++) {
        const gchar *field_name;

        field_name = gst_structure_nth_field_name (s, i);
        if (pk_gst_field_get_type (field_name) < 0) {
            g_message ("PackageKit: ignoring field named %s", field_name);
            continue;
        }

        fields = g_list_insert_sorted (fields, g_strdup (field_name), (GCompareFunc) pk_gst_fields_type_compare);
    }

    string = g_string_new("");
    for (l = fields; l != NULL; l = l->next) {
        gchar *field_name;
        GType type;

        field_name = l->data;

        type = gst_structure_get_field_type (s, field_name);
        g_message ("PackageKit: field is: %s, type: %s", field_name, g_type_name (type));

        if (type == G_TYPE_INT) {
            int value = 0;

            gst_structure_get_int (s, field_name, &value);
            g_string_append_printf (string, "(%s=%d)", field_name, value);
        } else if (type == G_TYPE_BOOLEAN) {
            gboolean value = FALSE;

            gst_structure_get_boolean (s, field_name, &value);
            g_string_append_printf (string, "(%s=%s)", field_name, value ? "true" : "false");
        } else if (type == G_TYPE_STRING) {
            const gchar *value;

            value = gst_structure_get_string (s, field_name);
            g_string_append_printf (string, "(%s=%s)", field_name, value);
        } else {
            g_warning ("PackageKit: unhandled type! %s", g_type_name (type));
        }

        g_free (field_name);
    }
    return g_string_free (string, FALSE);
}
Example #12
/**
 * gst_video_event_parse_downstream_force_key_unit:
 * @event: A #GstEvent to parse
 * @timestamp: (out): A pointer to the timestamp in the event
 * @stream_time: (out): A pointer to the stream-time in the event
 * @running_time: (out): A pointer to the running-time in the event
 * @all_headers: (out): A pointer to the all_headers flag in the event
 * @count: (out): A pointer to the count field of the event
 *
 * Get timestamp, stream-time, running-time, all-headers and count in the force
 * key unit event. See gst_video_event_new_downstream_force_key_unit() for a
 * full description of the downstream force key unit event.
 *
 * @running_time will be adjusted for any pad offsets of the pads the event
 * passed through.
 *
 * Returns: %TRUE if the event is a valid downstream force key unit event.
 */
gboolean
gst_video_event_parse_downstream_force_key_unit (GstEvent * event,
    GstClockTime * timestamp, GstClockTime * stream_time,
    GstClockTime * running_time, gboolean * all_headers, guint * count)
{
  const GstStructure *s;
  GstClockTime ev_timestamp, ev_stream_time, ev_running_time;
  gboolean ev_all_headers;
  guint ev_count;

  g_return_val_if_fail (event != NULL, FALSE);

  if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_DOWNSTREAM)
    return FALSE;               /* Not a force key unit event */

  s = gst_event_get_structure (event);
  if (s == NULL
      || !gst_structure_has_name (s, GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME))
    return FALSE;

  if (!gst_structure_get_clock_time (s, "timestamp", &ev_timestamp))
    ev_timestamp = GST_CLOCK_TIME_NONE;
  if (!gst_structure_get_clock_time (s, "stream-time", &ev_stream_time))
    ev_stream_time = GST_CLOCK_TIME_NONE;
  if (!gst_structure_get_clock_time (s, "running-time", &ev_running_time))
    ev_running_time = GST_CLOCK_TIME_NONE;
  if (!gst_structure_get_boolean (s, "all-headers", &ev_all_headers))
    ev_all_headers = FALSE;
  if (!gst_structure_get_uint (s, "count", &ev_count))
    ev_count = 0;

  if (timestamp)
    *timestamp = ev_timestamp;

  if (stream_time)
    *stream_time = ev_stream_time;

  if (running_time) {
    gint64 offset = gst_event_get_running_time_offset (event);

    *running_time = ev_running_time;
    /* Catch underflows */
    if (*running_time > -offset)
      *running_time += offset;
    else
      *running_time = 0;
  }

  if (all_headers)
    *all_headers = ev_all_headers;

  if (count)
    *count = ev_count;

  return TRUE;
}
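A sketch of driving this parser from a 1.0-style pad probe; the probe callback and its wiring are illustrative, only the two gst_video_event_* calls come from the API above:

static GstPadProbeReturn
force_key_unit_probe_cb (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstClockTime running_time;
  gboolean all_headers;
  guint count;

  if (gst_video_event_is_force_key_unit (event) &&
      gst_video_event_parse_downstream_force_key_unit (event, NULL, NULL,
          &running_time, &all_headers, &count)) {
    GST_DEBUG ("force key unit at %" GST_TIME_FORMAT ", all-headers=%d, "
        "count=%u", GST_TIME_ARGS (running_time), all_headers, count);
  }

  return GST_PAD_PROBE_OK;
}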
Example #13
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    /* take a ref: the buffer is owned by the caps, and this element
     * unrefs state->codec_data itself on the next setcaps */
    state->codec_data = gst_buffer_ref (gst_value_get_buffer (codec_data));
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  g_object_unref (base_video_decoder);

  return ret;
}
Example #14
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state;
  gboolean ret;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_DEBUG ("setcaps");

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  state->fps_n = 0;
  state->fps_d = 1;
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  if (state->fps_d == 0) {
    state->fps_n = 0;
    state->fps_d = 1;
  }

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  ret = base_video_encoder_class->set_format (base_video_encoder,
      &GST_BASE_VIDEO_CODEC (base_video_encoder)->state);
  if (ret) {
    ret = base_video_encoder_class->start (base_video_encoder);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Example #15
void
ges_base_xml_formatter_add_layer (GESBaseXmlFormatter * self,
    GType extractable_type, guint priority, GstStructure * properties,
    const gchar * metadatas, GError ** error)
{
  LayerEntry *entry;
  GESAsset *asset;
  GESLayer *layer;
  gboolean auto_transition = FALSE;
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);

  if (priv->check_only)
    return;

  if (extractable_type == G_TYPE_NONE)
    layer = ges_layer_new ();
  else {
    asset = ges_asset_request (extractable_type, NULL, error);
    if (asset == NULL) {
      /* always bail out on a NULL asset, not only when we set the
       * error ourselves, so we never extract from NULL below */
      if (error && *error == NULL)
        g_set_error (error, G_MARKUP_ERROR,
            G_MARKUP_ERROR_INVALID_CONTENT,
            "Layer type %s could not be created",
            g_type_name (extractable_type));
      return;
    }
    layer = GES_LAYER (ges_asset_extract (asset, error));
  }

  ges_layer_set_priority (layer, priority);
  ges_timeline_add_layer (GES_FORMATTER (self)->timeline, layer);
  if (properties) {
    if (gst_structure_get_boolean (properties, "auto-transition",
            &auto_transition))
      gst_structure_remove_field (properties, "auto-transition");

    gst_structure_foreach (properties,
        (GstStructureForeachFunc) set_property_foreach, layer);
  }

  if (metadatas)
    ges_meta_container_add_metas_from_string (GES_META_CONTAINER (layer),
        metadatas);

  entry = g_slice_new0 (LayerEntry);
  entry->layer = gst_object_ref (layer);
  entry->auto_trans = auto_transition;

  g_hash_table_insert (priv->layers, GINT_TO_POINTER (priority), entry);
}
Example #16
/**
 * gst_rtsp_token_is_allowed:
 * @token: a #GstRTSPToken
 * @field: a field name
 *
 * Check if @token has a boolean @field and if it is set to %TRUE.
 *
 * Returns: %TRUE if @token has a boolean field named @field set to %TRUE.
 */
gboolean
gst_rtsp_token_is_allowed (GstRTSPToken * token, const gchar * field)
{
  gboolean result;

  g_return_val_if_fail (GST_IS_RTSP_TOKEN (token), FALSE);
  g_return_val_if_fail (field != NULL, FALSE);

  if (!gst_structure_get_boolean (GST_RTSP_TOKEN_STRUCTURE (token), field,
          &result))
    result = FALSE;

  return result;
}
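A minimal usage sketch, assuming a token built with gst_rtsp_token_new(); the field name here is illustrative:

static gboolean
token_grants_access (void)
{
  /* "media.factory.access" is an illustrative field name */
  GstRTSPToken *token = gst_rtsp_token_new ("media.factory.access",
      G_TYPE_BOOLEAN, TRUE, NULL);
  gboolean allowed = gst_rtsp_token_is_allowed (token, "media.factory.access");

  gst_rtsp_token_unref (token);
  return allowed;
}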
Example #17
/**
 * gst_navigation_message_parse_mouse_over:
 * @message: A #GstMessage to inspect.
 * @active: A pointer to a gboolean to receive the active/inactive state,
 * or NULL.
 *
 * Parse a #GstNavigation message of type #GST_NAVIGATION_MESSAGE_MOUSE_OVER
 * and extract the active/inactive flag. If the mouse over event is marked
 * active, it indicates that the mouse is over a clickable area.
 *
 * Returns: %TRUE if the message could be successfully parsed. %FALSE if not.
 * Since: 0.10.23
 */
gboolean
gst_navigation_message_parse_mouse_over (GstMessage * message,
    gboolean * active)
{
  if (!GST_NAVIGATION_MESSAGE_HAS_TYPE (message, MOUSE_OVER))
    return FALSE;

  if (active) {
    const GstStructure *s = gst_message_get_structure (message);
    if (gst_structure_get_boolean (s, "active", active) == FALSE)
      return FALSE;
  }

  return TRUE;
}
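A sketch of a call site in a bus handler; gst_navigation_message_get_type() guards the parse. The handler itself is illustrative:

static void
on_navigation_message (GstMessage * message)
{
  gboolean active = FALSE;

  if (gst_navigation_message_get_type (message) ==
      GST_NAVIGATION_MESSAGE_MOUSE_OVER &&
      gst_navigation_message_parse_mouse_over (message, &active)) {
    GST_INFO ("mouse is %s a clickable area", active ? "over" : "not over");
  }
}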
Example #18
gboolean gst_goo_util_structure_is_parsed (GstStructure *structure) {
    if (gst_structure_has_field (structure, "parsed")) {
        gboolean parsed = FALSE;
        gst_structure_get_boolean (structure, "parsed", &parsed);
        if (parsed)
            return TRUE;
    }

    if (gst_structure_has_field (structure, "framed")) {
        gboolean framed = FALSE;
        gst_structure_get_boolean (structure, "framed", &framed);
        if (framed)
            return TRUE;
    }

    if (gst_structure_has_field (structure, "codec_data")) {
        const GValue *codec_data = NULL;
        codec_data = gst_structure_get_value (structure, "codec_data");
        if (codec_data != NULL)
            return TRUE;
    }

    return FALSE;
}
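A sketch of a typical call site, pulling the first structure out of negotiated caps (the wrapper is illustrative):

static gboolean
caps_are_parsed (GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);

  return gst_goo_util_structure_is_parsed (structure);
}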
Example #19
static gboolean progress_buffer_checkgetrange(GstPad *pad)
{
    ProgressBuffer *element = PROGRESS_BUFFER(GST_PAD_PARENT(pad));
#if ENABLE_PULL_MODE
    gboolean     supported = FALSE;
    GstStructure *s = gst_structure_new(GETRANGE_QUERY_NAME, NULL, NULL);
    GstQuery *query = gst_query_new_custom(GST_QUERY_CUSTOM, s);
    /* the getter only writes the out-param on success, so keep it
     * initialized and don't reuse it as the accumulator */
    if (gst_pad_peer_query(pad, query))
        gst_structure_get_boolean(s, GETRANGE_QUERY_SUPPORTS_FIELDNANE,
                                  &supported);
// INLINE - gst_query_unref()
    gst_query_unref(query);
    return supported;
#else
    return gst_pad_check_pull_range(element->sinkpad);
#endif
}
Example #20
static GUPnPDLNABoolValue
get_bool_value_from_structure (const GstStructure *st,
                               const gchar *name)
{
        GUPnPDLNABoolValue value = GUPNP_DLNA_BOOL_VALUE_UNSET;

        if (st != NULL) {
                gboolean data;

                if (gst_structure_get_boolean (st, name, &data)) {
                        value.state = GUPNP_DLNA_VALUE_STATE_SET;
                        value.value = data;
                }
        }

        return value;
}
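Usage sketch; the field name is illustrative and the wrapper assumes the GUPnPDLNABoolValue layout used above:

static gboolean
structure_is_interlaced (const GstStructure *st)
{
        GUPnPDLNABoolValue v =
                get_bool_value_from_structure (st, "interlaced");

        return v.state == GUPNP_DLNA_VALUE_STATE_SET && v.value;
}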
Example #21
static gboolean
gst_teletextdec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstTeletextDec *teletext = GST_TELETEXTDEC (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  const gchar *mimetype = gst_structure_get_name (structure);

  GST_DEBUG_OBJECT (teletext, "%s:%s, caps=%" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), caps);

  if (g_strcmp0 (mimetype, "private/teletext") == 0) {
    teletext->process_buf_func = gst_teletextdec_process_telx_buffer;
    goto accept_caps;
  } else if (g_strcmp0 (mimetype, "video/mpeg") == 0) {
    gint version;
    gboolean is_systemstream;

    if (!gst_structure_get_int (structure, "mpegversion", &version) ||
        !gst_structure_get_boolean (structure, "systemstream",
            &is_systemstream))
      goto refuse_caps;

    if (version != 2 || !is_systemstream)
      goto refuse_caps;

    teletext->process_buf_func = gst_teletextdec_process_pes_buffer;
    teletext->demux = vbi_dvb_pes_demux_new (gst_teletextdec_convert, teletext);
    goto accept_caps;
  } else
    goto refuse_caps;

accept_caps:
  {
    /* push the preroll buffer before dropping our ref */
    gboolean ret = gst_teletextdec_push_preroll_buffer (teletext);

    gst_object_unref (teletext);
    return ret;
  }

refuse_caps:
  {
    GST_ERROR_OBJECT (teletext,
        "pad %s refused renegotiation to %" GST_PTR_FORMAT,
        GST_PAD_NAME (pad), caps);
    gst_object_unref (teletext);
    return FALSE;
  }
}
Example #22
static gboolean generic_init(GstTIDmaidec *dmaidec){
    GstStructure *capStruct;
    struct gstti_generic_parser_private *priv;
    GstCaps      *caps = GST_PAD_CAPS(dmaidec->sinkpad);

    /* Initialize GST_LOG for this object */
    GST_DEBUG_CATEGORY_INIT(gst_tisupport_generic_debug, "TISupportGeneric", 0,
        "DMAI plugins Generic Support functions");

    priv = g_malloc0(sizeof(struct gstti_generic_parser_private));
    g_assert(priv != NULL);
    priv->parsed = FALSE;

    if (!caps)
        goto done;
    
    capStruct = gst_caps_get_structure(caps,0);
    if (!capStruct)
        goto done;

    /* Read extra data passed via demuxer. */
    gst_structure_get_boolean(capStruct, "parsed", &priv->parsed);

/* Disable optimization for now, seems like some decoders aren't
 * that happy with it
 */
#if 0
    /* If we have a parsed stream we don't behave as circular buffer,
     * but instead we just pass the full frames we received down to the
     * decoders.
     */
    if (priv->parsed){
        GST_INFO("Using parsed stream");
        if (dmaidec->numInputBufs == 0) {
            dmaidec->numInputBufs = 1;
        }
    }
#endif

done:
    dmaidec->parser_private = priv;
    
    GST_DEBUG("Parser initialized");
    return TRUE;
}
Example #23
static gboolean
spc_negotiate (GstSpcDec * spc)
{
  GstCaps *allowed, *caps;
  GstStructure *structure;
  gint width = 16, depth = 16;
  gboolean sign = TRUE;         /* the output caps below are always signed */
  int rate = 32000;
  int channels = 2;

  allowed = gst_pad_get_allowed_caps (spc->srcpad);
  if (!allowed) {
    GST_DEBUG_OBJECT (spc, "couldn't get allowed caps");
    return FALSE;
  }

  GST_DEBUG_OBJECT (spc, "allowed caps: %" GST_PTR_FORMAT, allowed);

  structure = gst_caps_get_structure (allowed, 0);
  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "depth", &depth);

  if (width && depth && width != depth) {
    GST_DEBUG_OBJECT (spc, "width %d and depth %d are different", width, depth);
    gst_caps_unref (allowed);
    return FALSE;
  }

  gst_structure_get_boolean (structure, "signed", &sign);
  gst_structure_get_int (structure, "rate", &rate);
  gst_structure_get_int (structure, "channels", &channels);

  caps = gst_caps_new_simple ("audio/x-raw-int",
      "endianness", G_TYPE_INT, G_BYTE_ORDER,
      "signed", G_TYPE_BOOLEAN, TRUE,
      "width", G_TYPE_INT, width,
      "depth", G_TYPE_INT, depth,
      "rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, channels, NULL);
  gst_pad_set_caps (spc->srcpad, caps);

  gst_caps_unref (caps);
  gst_caps_unref (allowed);

  return TRUE;
}
Example #24
static gboolean
gst_ffmpegdeinterlace_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  AVCodecContext *ctx;
  GstCaps *src_caps;
  gboolean ret;

  if (!gst_structure_get_int (structure, "width", &deinterlace->width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", &deinterlace->height))
    return FALSE;

  deinterlace->interlaced = FALSE;
  gst_structure_get_boolean (structure, "interlaced", &deinterlace->interlaced);
  gst_ffmpegdeinterlace_update_passthrough (deinterlace);

  ctx = avcodec_alloc_context ();
  ctx->width = deinterlace->width;
  ctx->height = deinterlace->height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (AVMEDIA_TYPE_VIDEO, caps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);
    return FALSE;
  }

  deinterlace->pixfmt = ctx->pix_fmt;

  av_free (ctx);

  deinterlace->to_size =
      avpicture_get_size (deinterlace->pixfmt, deinterlace->width,
      deinterlace->height);

  src_caps = gst_caps_copy (caps);
  gst_caps_set_simple (src_caps, "interlaced", G_TYPE_BOOLEAN,
      deinterlace->interlaced, NULL);
  ret = gst_pad_set_caps (deinterlace->srcpad, src_caps);
  gst_caps_unref (src_caps);

  return ret;
}
Example #25
static gboolean
dbin_autoplug_continue_cb (GstElement * dbin, GstPad * pad, GstCaps * caps,
    gpointer user_data)
{
  GstSingleDecodeBin *sdbin = GST_SINGLE_DECODE_BIN (user_data);
  GstStructure *s;
  gboolean parsed = FALSE;

  if (!sdbin->parse_only)
    return TRUE;

  /* We will only get fixed caps in autoplug-continue */
  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_get_boolean (s, "parsed", &parsed) && parsed) {
    /* A parser has been plugged in and that's all that we want */
    return FALSE;
  }

  return TRUE;
}
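The callback matches decodebin's autoplug-continue signal; a sketch of wiring it up, assuming sdbin->dbin holds the decodebin element:

/* somewhere in the single-decode-bin setup code */
g_signal_connect (sdbin->dbin, "autoplug-continue",
    G_CALLBACK (dbin_autoplug_continue_cb), sdbin);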
Example #26
static gboolean
gst_dtmf_src_handle_dtmf_event (GstDTMFSrc * dtmfsrc,
    const GstStructure * event_structure)
{
  gint event_type;
  gboolean start;
  gint method;

  if (!gst_structure_get_int (event_structure, "type", &event_type) ||
      !gst_structure_get_boolean (event_structure, "start", &start) ||
      (start == TRUE && event_type != GST_TONE_DTMF_TYPE_EVENT))
    goto failure;

  if (gst_structure_get_int (event_structure, "method", &method)) {
    if (method != 2) {
      goto failure;
    }
  }

  if (start) {
    gint event_number;
    gint event_volume;

    if (!gst_structure_get_int (event_structure, "number", &event_number) ||
        !gst_structure_get_int (event_structure, "volume", &event_volume))
      goto failure;

    GST_DEBUG_OBJECT (dtmfsrc, "Received start event %d with volume %d",
        event_number, event_volume);
    gst_dtmf_src_add_start_event (dtmfsrc, event_number, event_volume);
  }

  else {
    GST_DEBUG_OBJECT (dtmfsrc, "Received stop event");
    gst_dtmf_src_add_stop_event (dtmfsrc);
  }

  return TRUE;
failure:
  return FALSE;
}
Example #27
void basedecoder_set_codec_data(BaseDecoder *decoder, GstStructure *s)
{
    if (!gst_structure_get_boolean(s, "hls", &decoder->is_hls))
        decoder->is_hls = FALSE;

    const GValue *value = gst_structure_get_value(s, "codec_data");
    if (value)
    {
        GstBuffer* codec_data_buf = gst_value_get_buffer(value);
        if (codec_data_buf)
        {
            GstMapInfo info;
            if (gst_buffer_map(codec_data_buf, &info, GST_MAP_READ))
            {
                decoder->codec_data_size = info.size;
                decoder->codec_data = g_memdup(info.data, info.size);
                gst_buffer_unmap(codec_data_buf, &info);
            }
        }
    }
}
Example #28
/**
 * gst_video_event_parse_still_frame:
 * @event: A #GstEvent to parse
 * @in_still: A boolean to receive the still-frame status from the event, or NULL
 *
 * Parse a #GstEvent, identify if it is a Still Frame event, and
 * return the still-frame state from the event if it is.
 * If the event represents the start of a still frame, the in_still
 * variable will be set to TRUE, otherwise FALSE. It is OK to pass NULL for the
 * in_still variable in order to just check whether the event is a valid
 * still-frame event.
 *
 * Create a still frame event using gst_video_event_new_still_frame()
 *
 * Returns: %TRUE if the event is a valid still-frame event. %FALSE if not
 */
gboolean
gst_video_event_parse_still_frame (GstEvent * event, gboolean * in_still)
{
  const GstStructure *s;
  gboolean ev_still_state;

  g_return_val_if_fail (event != NULL, FALSE);

  if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_DOWNSTREAM)
    return FALSE;               /* Not a still frame event */

  s = gst_event_get_structure (event);
  if (s == NULL
      || !gst_structure_has_name (s, GST_VIDEO_EVENT_STILL_STATE_NAME))
    return FALSE;               /* Not a still frame event */
  if (!gst_structure_get_boolean (s, "still-state", &ev_still_state))
    return FALSE;               /* Not a still frame event */
  if (in_still)
    *in_still = ev_still_state;
  return TRUE;
}
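A sketch of consuming the event in a 0.10-style sink event handler (the handler itself is illustrative):

static gboolean
my_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean in_still;

  if (gst_video_event_parse_still_frame (event, &in_still)) {
    GST_DEBUG ("still frame %s", in_still ? "started" : "ended");
    gst_event_unref (event);
    return TRUE;
  }

  return gst_pad_event_default (pad, event);
}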
Example #29
static gboolean
rsn_parsetter_sink_event (GstPad * pad, GstEvent * event)
{
  RsnParSetter *parset = RSN_PARSETTER (gst_pad_get_parent (pad));
  const GstStructure *structure = gst_event_get_structure (event);

  if (structure != NULL &&
      gst_structure_has_name (structure, "application/x-gst-dvd")) {
    const char *type = gst_structure_get_string (structure, "event");
    if (type == NULL)
      goto out;

    if (strcmp (type, "dvd-video-format") == 0) {
      gboolean is_widescreen = FALSE;

      gst_structure_get_boolean (structure, "video-widescreen", &is_widescreen);

      GST_DEBUG_OBJECT (parset, "Video is %s",
          is_widescreen ? "16:9" : "4:3");

      g_mutex_lock (parset->caps_lock);
      if (parset->is_widescreen != is_widescreen) {
        /* Force caps check */
        gst_caps_replace (&parset->in_caps_last, NULL);
        gst_caps_replace (&parset->in_caps_converted, NULL);
      }
      parset->is_widescreen = is_widescreen;

      /* FIXME: Added for testing: */
      // parset->is_widescreen = FALSE;

      g_mutex_unlock (parset->caps_lock);
    }
  }

out:
  gst_object_unref (GST_OBJECT (parset));
  return gst_pad_event_default (pad, event);
}
Example #30
void
gst_lv2_source_register_element (GstPlugin * plugin, GstStructure * lv2_meta)
{
  GTypeInfo info = {
    sizeof (GstLV2SourceClass),
    (GBaseInitFunc) gst_lv2_source_base_init,
    (GBaseFinalizeFunc) gst_lv2_source_base_finalize,
    (GClassInitFunc) gst_lv2_source_class_init,
    NULL,
    NULL,
    sizeof (GstLV2Source),
    0,
    (GInstanceInitFunc) gst_lv2_source_init,
  };
  const gchar *type_name =
      gst_structure_get_string (lv2_meta, "element-type-name");
  GType element_type =
      g_type_register_static (GST_TYPE_BASE_SRC, type_name, &info, 0);
  gboolean can_do_presets = FALSE;

  /* register interfaces */
  gst_structure_get_boolean (lv2_meta, "can-do-presets", &can_do_presets);
  if (can_do_presets) {
    const GInterfaceInfo preset_interface_info = {
      (GInterfaceInitFunc) gst_lv2_source_preset_interface_init,
      NULL,
      NULL
    };

    g_type_add_interface_static (element_type, GST_TYPE_PRESET,
        &preset_interface_info);
  }

  gst_element_register (plugin, type_name, GST_RANK_NONE, element_type);

  if (!parent_class)
    parent_class = g_type_class_ref (GST_TYPE_BASE_SRC);
}