/* Build a "stream-NN" GstStructure describing one discovered stream:
 * its (negotiated or template) caps and, when present, its tags. */
static GstStructure *
collect_stream_information (GstDiscoverer * dc, PrivateStream * ps, guint idx)
{
  GstStructure *info;
  GstCaps *caps;
  gchar *name;

  name = g_strdup_printf ("stream-%02d", idx);
  info = gst_structure_empty_new (name);
  g_free (name);

  /* Prefer the negotiated caps; fall back to the pad's possible caps. */
  caps = gst_pad_get_negotiated_caps (ps->pad);
  if (caps == NULL) {
    GST_WARNING ("Couldn't get negotiated caps from %s:%s",
        GST_DEBUG_PAD_NAME (ps->pad));
    caps = gst_pad_get_caps (ps->pad);
  }

  if (caps != NULL) {
    GST_DEBUG ("Got caps %" GST_PTR_FORMAT, caps);
    gst_structure_id_set (info, _CAPS_QUARK, GST_TYPE_CAPS, caps, NULL);
    gst_caps_unref (caps);
  }

  if (ps->tags != NULL)
    gst_structure_id_set (info, _TAGS_QUARK, GST_TYPE_STRUCTURE, ps->tags, NULL);

  return info;
}
Exemple #2
0
/**
 * gst_query_new_buffering:
 * @format: the default #GstFormat for the new query
 *
 * Constructs a new query object for querying the buffering status of
 * a stream.
 *
 * Returns: A #GstQuery
 *
 * Since: 0.10.20
 */
GstQuery *
gst_query_new_buffering (GstFormat format)
{
  GstStructure *s;

  /* Pre-fill the answer with "no buffering in progress, 100% full,
   * stream mode, unknown rates and range" so unanswered queries are sane. */
  s = gst_structure_empty_new ("GstQueryBuffering");
  gst_structure_id_set (s,
      GST_QUARK (BUSY), G_TYPE_BOOLEAN, FALSE,
      GST_QUARK (BUFFER_PERCENT), G_TYPE_INT, 100,
      GST_QUARK (BUFFERING_MODE), GST_TYPE_BUFFERING_MODE, GST_BUFFERING_STREAM,
      GST_QUARK (AVG_IN_RATE), G_TYPE_INT, -1,
      GST_QUARK (AVG_OUT_RATE), G_TYPE_INT, -1,
      GST_QUARK (BUFFERING_LEFT), G_TYPE_INT64, G_GINT64_CONSTANT (0),
      GST_QUARK (ESTIMATED_TOTAL), G_TYPE_INT64, G_GINT64_CONSTANT (-1),
      GST_QUARK (FORMAT), GST_TYPE_FORMAT, format,
      GST_QUARK (START_VALUE), G_TYPE_INT64, G_GINT64_CONSTANT (-1),
      GST_QUARK (STOP_VALUE), G_TYPE_INT64, G_GINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_BUFFERING, s);
}
/* Generate all possible caps the dc1394 source could ever produce:
 * one structure per fixed video mode, plus Format 7 structures for
 * every color coding.  Returns a newly allocated GstCaps owned by
 * the caller. */
static GstCaps *
gst_dc1394_get_all_dc1394_caps (void)
{
    GstCaps *gcaps;
    gint i = 0;

    gcaps = gst_caps_new_empty ();

    /* first, the fixed mode caps */
    for (i = DC1394_VIDEO_MODE_MIN; i < DC1394_VIDEO_MODE_EXIF; i++) {
        GstStructure *gs = gst_structure_empty_new ("video");
        gint ret = gst_dc1394_caps_set_format_vmode_caps (gs, i);

        gst_structure_set (gs,
                           "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
        gst_structure_set (gs, "vmode", G_TYPE_INT, i, NULL);
        if (ret >= 0) {
            gst_caps_append_structure (gcaps, gs);
        } else {
            /* not appended to gcaps: free it here, otherwise it leaks */
            gst_structure_free (gs);
        }
    }

    /* then Format 7 options */
    for (i = DC1394_COLOR_CODING_MIN; i <= DC1394_COLOR_CODING_MAX; i++) {
        GstStructure *gs = gst_structure_empty_new ("video");

        gst_structure_set (gs, "vmode", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
        gst_structure_set (gs,
                           "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
        gst_structure_set (gs,
                           "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

        if (gst_dc1394_set_caps_color (gs, i)) {
            gst_caps_append_structure (gcaps, gs);
        } else {
            /* color coding not representable: free the unused structure */
            gst_structure_free (gs);
        }
    }
    return gcaps;
}
// Callback for playbin's "notify::source" signal: configures the freshly
// created source element (typically souphttpsrc) from the session's
// QNetworkRequest -- disables icecast metadata, sets User-Agent, and copies
// the remaining request headers into the "extra-headers" property.
// NOTE(review): 'source' is acquired via g_object_get() (which takes a ref)
// but is never g_object_unref()'d in the visible code -- likely a ref leak;
// confirm against the rest of this function (the excerpt below is truncated).
void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpointer d)
{
    Q_UNUSED(p);

    GstElement *source = 0;
    g_object_get(o, "source", &source, NULL);
    if (source == 0)
        return;

    // Turn off icecast metadata request, will be re-set if in QNetworkRequest
    // (souphttpsrc docs say is false by default, but header appears in request
    // @version 0.10.21)
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "iradio-mode") != 0)
        g_object_set(G_OBJECT(source), "iradio-mode", FALSE, NULL);


    // Set Headers
    const QByteArray userAgentString("User-Agent");

    QGstreamerPlayerSession *self = reinterpret_cast<QGstreamerPlayerSession *>(d);

    // User-Agent - special case, souphhtpsrc will always set something, even if
    // defined in extra-headers
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "user-agent") != 0) {
        g_object_set(G_OBJECT(source), "user-agent",
                     self->m_request.rawHeader(userAgentString).constData(), NULL);
    }

    // The rest
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0) {
        GstStructure *extras = gst_structure_empty_new("extras");

        foreach (const QByteArray &rawHeader, self->m_request.rawHeaderList()) {
            if (rawHeader == userAgentString) // Filter User-Agent
                continue;
            else {
                GValue headerValue;

                memset(&headerValue, 0, sizeof(GValue));
                g_value_init(&headerValue, G_TYPE_STRING);

                g_value_set_string(&headerValue,
                                   self->m_request.rawHeader(rawHeader).constData());

                // NOTE(review): gst_structure_set_value() copies the value,
                // so headerValue should be g_value_unset() here; as written
                // the string copy inside headerValue leaks each iteration.
                gst_structure_set_value(extras, rawHeader.constData(), &headerValue);
            }
        }

        if (gst_structure_n_fields(extras) > 0)
            g_object_set(G_OBJECT(source), "extra-headers", extras, NULL);

        gst_structure_free(extras);
    }
    // NOTE(review): the function continues beyond this excerpt -- its closing
    // brace (and any trailing cleanup such as unref'ing 'source') is not shown.
Exemple #5
0
/**
 * gst_query_new_duration:
 * @format: the #GstFormat for this duration query
 *
 * Constructs a new stream duration query object to query in the given format.
 * Use gst_query_unref() when done with it. A duration query will give the
 * total length of the stream.
 *
 * Returns: A #GstQuery
 */
GstQuery *
gst_query_new_duration (GstFormat format)
{
  GstStructure *s;

  /* duration starts out as -1 ("unknown") until somebody answers */
  s = gst_structure_empty_new ("GstQueryDuration");
  gst_structure_id_set (s,
      GST_QUARK (FORMAT), GST_TYPE_FORMAT, format,
      GST_QUARK (DURATION), G_TYPE_INT64, G_GINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_DURATION, s);
}
Exemple #6
0
/* GstPushSrc "create" vfunc: produce one buffer of transport-stream data
 * read from the DVR device.  Returns GST_FLOW_OK with *buf set on success,
 * GST_FLOW_ERROR otherwise.  Holds tune_mutex for the whole read so the
 * frontend cannot be retuned mid-read. */
static GstFlowReturn
gst_dvbsrc_create (GstPushSrc * element, GstBuffer ** buf)
{
    gint buffer_size;
    GstFlowReturn retval = GST_FLOW_ERROR;
    GstDvbSrc *object;

    object = GST_DVBSRC (element);
    GST_LOG ("fd_dvr: %d", object->fd_dvr);

    //g_object_get(G_OBJECT(object), "blocksize", &buffer_size, NULL);
    /* fixed read size; the blocksize property is deliberately ignored here */
    buffer_size = DEFAULT_BUFFER_SIZE;

    /* device can not be tuned during read */
    g_mutex_lock (object->tune_mutex);


    /* fd_dvr < 0 means the device is not open: fall through and return
     * GST_FLOW_ERROR with the mutex released */
    if (object->fd_dvr > -1) {
        /* --- Read TS from DVR device --- */
        GST_DEBUG_OBJECT (object, "Reading from DVR device");
        *buf = read_device (object->fd_dvr, object->adapter_number,
                            object->frontend_number, buffer_size, object);
        if (*buf != NULL) {
            GstCaps *caps;

            retval = GST_FLOW_OK;

            /* stamp the outgoing buffer with the src pad's caps */
            caps = gst_pad_get_caps (GST_BASE_SRC_PAD (object));
            gst_buffer_set_caps (*buf, caps);
            gst_caps_unref (caps);
        } else {
            GST_DEBUG_OBJECT (object, "Failed to read from device");
            /* notify the application of the read failure via the bus */
            gst_element_post_message (GST_ELEMENT_CAST (object),
                                      gst_message_new_element (GST_OBJECT (object),
                                              gst_structure_empty_new ("dvb-read-failure")));
        }

        /* periodically publish frontend statistics (every stats_interval
         * buffers; 0 disables) */
        if (object->stats_interval != 0 &&
                ++object->stats_counter == object->stats_interval) {
            gst_dvbsrc_output_frontend_stats (object);
            object->stats_counter = 0;
        }
    }

    g_mutex_unlock (object->tune_mutex);
    return retval;

}
Exemple #7
0
/**
 * gst_query_new_latency:
 *
 * Constructs a new latency query object.
 * Use gst_query_unref() when done with it. A latency query is usually performed
 * by sinks to compensate for additional latency introduced by elements in the
 * pipeline.
 *
 * Returns: A #GstQuery
 *
 * Since: 0.10.12
 */
GstQuery *
gst_query_new_latency (void)
{
  GstStructure *s;

  /* defaults: non-live, zero minimum latency, unbounded maximum */
  s = gst_structure_empty_new ("GstQueryLatency");
  gst_structure_id_set (s,
      GST_QUARK (LIVE), G_TYPE_BOOLEAN, FALSE,
      GST_QUARK (MIN_LATENCY), G_TYPE_UINT64, G_GUINT64_CONSTANT (0),
      GST_QUARK (MAX_LATENCY), G_TYPE_UINT64, G_GUINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_LATENCY, s);
}
Exemple #8
0
/**
 * gst_query_new_seeking:
 * @format: the default #GstFormat for the new query
 *
 * Constructs a new query object for querying seeking properties of
 * the stream.
 *
 * Returns: A #GstQuery
 */
GstQuery *
gst_query_new_seeking (GstFormat format)
{
  GstStructure *s;

  /* defaults: not seekable, unknown segment bounds */
  s = gst_structure_empty_new ("GstQuerySeeking");
  gst_structure_id_set (s,
      GST_QUARK (FORMAT), GST_TYPE_FORMAT, format,
      GST_QUARK (SEEKABLE), G_TYPE_BOOLEAN, FALSE,
      GST_QUARK (SEGMENT_START), G_TYPE_INT64, G_GINT64_CONSTANT (-1),
      GST_QUARK (SEGMENT_END), G_TYPE_INT64, G_GINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_SEEKING, s);
}
Exemple #9
0
/**
 * gst_query_new_segment:
 * @format: the #GstFormat for the new query
 *
 * Constructs a new segment query object. Use gst_query_unref()
 * when done with it. A segment query is used to discover information about the
 * currently configured segment for playback.
 *
 * Returns: a #GstQuery
 */
GstQuery *
gst_query_new_segment (GstFormat format)
{
  GstStructure *s;

  /* defaults: zero rate, unknown start/stop values */
  s = gst_structure_empty_new ("GstQuerySegment");
  gst_structure_id_set (s,
      GST_QUARK (RATE), G_TYPE_DOUBLE, (gdouble) 0.0,
      GST_QUARK (FORMAT), GST_TYPE_FORMAT, format,
      GST_QUARK (START_VALUE), G_TYPE_INT64, G_GINT64_CONSTANT (-1),
      GST_QUARK (STOP_VALUE), G_TYPE_INT64, G_GINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_SEGMENT, s);
}
Exemple #10
0
/**
 * gst_query_new_convert:
 * @src_format: the source #GstFormat for the new query
 * @value: the value to convert
 * @dest_format: the target #GstFormat
 *
 * Constructs a new convert query object. Use gst_query_unref()
 * when done with it. A convert query is used to ask for a conversion between
 * one format and another.
 *
 * Returns: A #GstQuery
 */
GstQuery *
gst_query_new_convert (GstFormat src_format, gint64 value,
    GstFormat dest_format)
{
  GstStructure *s;

  /* the destination value starts out as -1 ("not converted yet") */
  s = gst_structure_empty_new ("GstQueryConvert");
  gst_structure_id_set (s,
      GST_QUARK (SRC_FORMAT), GST_TYPE_FORMAT, src_format,
      GST_QUARK (SRC_VALUE), G_TYPE_INT64, value,
      GST_QUARK (DEST_FORMAT), GST_TYPE_FORMAT, dest_format,
      GST_QUARK (DEST_VALUE), G_TYPE_INT64, G_GINT64_CONSTANT (-1), NULL);

  return gst_query_new (GST_QUERY_CONVERT, s);
}
/* Callback for pocketsphinx's "result" signal: wrap the hypothesis and
 * utterance id in a GstStructure and post it on the bus as an
 * application message. */
static void pocketvox_recognizer_process_result(GstElement* sphinx, gchar *hyp, gchar* uttid, gpointer data)
{
	GstStructure *stt = gst_structure_empty_new("result");
	GValue hypv = G_VALUE_INIT, uttidv = G_VALUE_INIT;
	GstMessage *msg;

	g_value_init(&hypv, G_TYPE_STRING);
	g_value_init(&uttidv, G_TYPE_STRING);

	/* g_value_set_string() copies its argument, so passing g_strdup()'d
	 * strings (as the original code did) leaked one copy per call */
	g_value_set_string(&hypv, hyp);
	g_value_set_string(&uttidv, uttid);

	gst_structure_set_value(stt, "hyp", &hypv);
	gst_structure_set_value(stt, "uttid", &uttidv);

	/* gst_structure_set_value() also copies: release our local copies */
	g_value_unset(&hypv);
	g_value_unset(&uttidv);

	/* the message takes ownership of stt */
	msg = gst_message_new_custom(GST_MESSAGE_APPLICATION, GST_OBJECT(sphinx), stt);
	gst_element_post_message(sphinx, msg);
}
/* Populate the file-scope element_table, either from the plugin's cached
 * data or by parsing the on-disk config file (falling back to the built-in
 * default_config).  The parsed table is stored as plugin cache data. */
static void
fetch_element_table (GstPlugin * plugin)
{
  gchar *path;
  gchar *config, *s;
  GstStructure *tmp, *element;

  element_table = gst_plugin_get_cache_data (plugin);

  if (element_table)
    return;

  path = get_config_path ();

  if (!g_file_get_contents (path, &config, NULL, NULL)) {
    g_warning ("could not find config file '%s'.. using defaults!", path);
    config = (gchar *) default_config;
  }

  gst_plugin_add_dependency_simple (plugin, "ONX_CONFIG", path, NULL,
      GST_PLUGIN_DEPENDENCY_FLAG_NONE);

  g_free (path);

  GST_DEBUG ("parsing config:\n%s", config);

  tmp = gst_structure_empty_new ("element_table");

  s = config;

  while ((element = gst_structure_from_string (s, &s))) {
    const gchar *element_name = gst_structure_get_name (element);

    /* gst_structure_set() copies boxed GST_TYPE_STRUCTURE values, so the
     * parsed structure must be freed here or it leaks every iteration */
    gst_structure_set (tmp, element_name, GST_TYPE_STRUCTURE, element, NULL);
    gst_structure_free (element);
  }

  /* only free the config text if it was read from disk */
  if (config != default_config)
    g_free (config);

  GST_DEBUG ("element_table=%" GST_PTR_FORMAT, tmp);

  gst_plugin_set_cache_data (plugin, tmp);

  element_table = tmp;
}
/**
 * send_hls_not_full_message
 *
 * Posts an HLS NOT FULL application message on the element's bus.
 */
static void send_hls_not_full_message(HLSProgressBuffer* element)
{
    GstMessage *message;

    /* the application message takes ownership of the structure */
    message = gst_message_new_application(GST_OBJECT(element),
            gst_structure_empty_new(HLS_PB_MESSAGE_NOT_FULL));
    gst_element_post_message(GST_ELEMENT(element), message);
}
/* Build an "audio/mpeg" GstStructure describing the remote A2DP endpoint's
 * MPEG capabilities (@mpeg), or return NULL if @mpeg is missing or
 * advertises no usable layer.  The returned structure is owned by the
 * caller.  Fix vs. original: GST_TYPE_LIST GValues were released with a
 * bare g_free(), leaking their contents; they are now g_value_unset()
 * first (gst_structure_set_value() stores a copy, so unsetting the local
 * value afterwards is safe). */
static GstStructure *gst_avdtp_sink_parse_mpeg_caps(
			GstAvdtpSink *self, mpeg_capabilities_t *mpeg)
{
	GstStructure *structure;
	GValue *value;
	GValue *list;
	gboolean valid_layer = FALSE;
	gboolean mono, stereo;

	if (!mpeg)
		return NULL;

	GST_LOG_OBJECT(self, "parsing mpeg caps");

	structure = gst_structure_empty_new("audio/mpeg");
	value = g_new0(GValue, 1);
	g_value_init(value, G_TYPE_INT);

	/* mpegversion: both 1 and 2 are always offered */
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	g_value_set_int(value, 1);
	gst_value_list_prepend_value(list, value);
	g_value_set_int(value, 2);
	gst_value_list_prepend_value(list, value);
	gst_structure_set_value(structure, "mpegversion", list);
	g_value_unset(list);
	g_free(list);

	/* layer */
	GST_LOG_OBJECT(self, "setting mpeg layer");
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (mpeg->layer & BT_MPEG_LAYER_1) {
		g_value_set_int(value, 1);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (mpeg->layer & BT_MPEG_LAYER_2) {
		g_value_set_int(value, 2);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (mpeg->layer & BT_MPEG_LAYER_3) {
		g_value_set_int(value, 3);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (list) {
		gst_structure_set_value(structure, "layer", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* no layer bit set: these caps are unusable */
	if (!valid_layer) {
		gst_structure_free(structure);
		g_value_unset(value);
		g_free(value);
		return NULL;
	}

	/* rate */
	GST_LOG_OBJECT(self, "setting mpeg rate");
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_48000) {
		g_value_set_int(value, 48000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_44100) {
		g_value_set_int(value, 44100);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_32000) {
		g_value_set_int(value, 32000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_24000) {
		g_value_set_int(value, 24000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_22050) {
		g_value_set_int(value, 22050);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_16000) {
		g_value_set_int(value, 16000);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "rate", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* channels: 1, 2 or the range 1-2 depending on the channel modes */
	GST_LOG_OBJECT(self, "setting mpeg channels");
	mono = FALSE;
	stereo = FALSE;
	if (mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
		mono = TRUE;
	if ((mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
			(mpeg->channel_mode &
			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
			(mpeg->channel_mode &
			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
		stereo = TRUE;

	if (mono && stereo) {
		g_value_init(value, GST_TYPE_INT_RANGE);
		gst_value_set_int_range(value, 1, 2);
	} else {
		g_value_init(value, G_TYPE_INT);
		if (mono)
			g_value_set_int(value, 1);
		else if (stereo)
			g_value_set_int(value, 2);
		else {
			GST_ERROR_OBJECT(self,
				"Unexpected number of channels");
			g_value_set_int(value, 0);
		}
	}
	gst_structure_set_value(structure, "channels", value);
	g_value_unset(value);
	g_free(value);

	return structure;
}
/* Build an "audio/x-sbc" GstStructure describing the remote A2DP
 * endpoint's SBC capabilities (@sbc).  The returned structure is owned by
 * the caller.  Fix vs. original: GST_TYPE_LIST GValues were released with
 * a bare g_free(), leaking their contents; they are now g_value_unset()
 * first (gst_structure_set_value() stores a copy, so unsetting the local
 * value afterwards is safe). */
static GstStructure *gst_avdtp_sink_parse_sbc_caps(
			GstAvdtpSink *self, sbc_capabilities_t *sbc)
{
	GstStructure *structure;
	GValue *value;
	GValue *list;
	gboolean mono, stereo;

	structure = gst_structure_empty_new("audio/x-sbc");
	value = g_value_init(g_new0(GValue, 1), G_TYPE_STRING);

	/* mode */
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO) {
		g_value_set_static_string(value, "mono");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) {
		g_value_set_static_string(value, "stereo");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) {
		g_value_set_static_string(value, "dual");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_JOINT_STEREO) {
		g_value_set_static_string(value, "joint");
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "mode", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* subbands */
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	value = g_value_init(value, G_TYPE_INT);
	if (sbc->subbands & BT_A2DP_SUBBANDS_4) {
		g_value_set_int(value, 4);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->subbands & BT_A2DP_SUBBANDS_8) {
		g_value_set_int(value, 8);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "subbands", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* blocks */
	value = g_value_init(value, G_TYPE_INT);
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_16) {
		g_value_set_int(value, 16);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_12) {
		g_value_set_int(value, 12);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_8) {
		g_value_set_int(value, 8);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_4) {
		g_value_set_int(value, 4);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "blocks", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* allocation */
	g_value_init(value, G_TYPE_STRING);
	list = g_value_init(g_new0(GValue,1), GST_TYPE_LIST);
	if (sbc->allocation_method & BT_A2DP_ALLOCATION_LOUDNESS) {
		g_value_set_static_string(value, "loudness");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->allocation_method & BT_A2DP_ALLOCATION_SNR) {
		g_value_set_static_string(value, "snr");
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "allocation", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* rate */
	g_value_init(value, G_TYPE_INT);
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_48000) {
		g_value_set_int(value, 48000);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_44100) {
		g_value_set_int(value, 44100);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_32000) {
		g_value_set_int(value, 32000);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_16000) {
		g_value_set_int(value, 16000);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "rate", list);
		g_value_unset(list);
		g_free(list);
		list = NULL;
	}

	/* bitpool: clamp both ends to the template maximum */
	value = g_value_init(value, GST_TYPE_INT_RANGE);
	gst_value_set_int_range(value,
			MIN(sbc->min_bitpool, TEMPLATE_MAX_BITPOOL),
			MIN(sbc->max_bitpool, TEMPLATE_MAX_BITPOOL));
	gst_structure_set_value(structure, "bitpool", value);
	g_value_unset(value);

	/* channels: 1, 2 or the range 1-2 depending on the channel modes */
	mono = FALSE;
	stereo = FALSE;
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
		mono = TRUE;
	if ((sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
			(sbc->channel_mode &
			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
			(sbc->channel_mode &
			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
		stereo = TRUE;

	if (mono && stereo) {
		g_value_init(value, GST_TYPE_INT_RANGE);
		gst_value_set_int_range(value, 1, 2);
	} else {
		g_value_init(value, G_TYPE_INT);
		if (mono)
			g_value_set_int(value, 1);
		else if (stereo)
			g_value_set_int(value, 2);
		else {
			GST_ERROR_OBJECT(self,
				"Unexpected number of channels");
			g_value_set_int(value, 0);
		}
	}

	gst_structure_set_value(structure, "channels", value);
	g_value_unset(value);
	g_free(value);

	return structure;
}
Exemple #16
0
/* Read exactly @size bytes of transport stream from the DVR device @fd
 * into a new GstBuffer, polling with a 100 ms timeout.  Returns the
 * buffer on success, or NULL if the fd is invalid or reading is
 * repeatedly interrupted.  Posts a "dvb-read-failure" element message
 * after every 10 consecutive-ish timeouts. */
static GstBuffer *
read_device (int fd, int adapter_number, int frontend_number, int size,
             GstDvbSrc * object)
{
    int count = 0;
    struct pollfd pfd[1];
    int ret_val = 0;
    guint attempts = 0;
    const int TIMEOUT = 100;

    GstBuffer *buf = gst_buffer_new_and_alloc (size);

    g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);

    if (fd < 0) {
        /* fix: the original returned without releasing the buffer */
        gst_buffer_unref (buf);
        return NULL;
    }

    pfd[0].fd = fd;
    pfd[0].events = POLLIN;

    while (count < size) {
        ret_val = poll (pfd, 1, TIMEOUT);
        if (ret_val > 0) {
            if (pfd[0].revents & POLLIN) {
                int tmp = 0;

                tmp = read (fd, GST_BUFFER_DATA (buf) + count, size - count);
                if (tmp < 0) {
                    GST_WARNING
                    ("Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)",
                     adapter_number, frontend_number, errno);
                    attempts += 1;
                    if (attempts % 10 == 0) {
                        GST_WARNING
                        ("Unable to read from device after %u attempts: /dev/dvb/adapter%d/dvr%d",
                         attempts, adapter_number, frontend_number);
                    }

                } else
                    count = count + tmp;
            } else {
                GST_LOG ("revents = %d\n", pfd[0].revents);
            }
        } else if (ret_val == 0) {  /* poll timeout */
            attempts += 1;
            GST_INFO ("Reading from device /dev/dvb/adapter%d/dvr%d timedout (%d)",
                      adapter_number, frontend_number, attempts);

            if (attempts % 10 == 0) {
                GST_WARNING
                ("Unable to read after %u attempts from device: /dev/dvb/adapter%d/dvr%d (%d)",
                 attempts, adapter_number, frontend_number, errno);
                gst_element_post_message (GST_ELEMENT_CAST (object),
                                          gst_message_new_element (GST_OBJECT (object),
                                                  gst_structure_empty_new ("dvb-read-failure")));

            }
        } else if (errno == EINTR) {        /* poll interrupted */
            /* fix: errno carries positive error codes, so the original
             * comparison against -EINTR could never be true and this
             * branch was dead.  Count the interruption and give up only
             * after 50 of them. */
            attempts += 1;
            if (attempts % 50 == 0) {
                gst_buffer_unref (buf);
                return NULL;
            }
        }
        /* NOTE: any other poll() error keeps looping, as before */
    }

    GST_BUFFER_SIZE (buf) = count;
    GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
    return buf;
}
Exemple #17
0
/* Build a multi-program MPEG-TS muxing pipeline: NR_PROG video (PS demux +
 * parse) and audio (MP3 parse) branches feeding mpegtsmux, with a program
 * map assigning each mux pad to its program, writing the result to a .ts
 * file and the pipeline description to XML. */
int main(int argc, char * argv[])
{
    GMainLoop *loop;
    GstElement *pipeline, *sink, *mux;
    GstElement *vsrc[NR_PROG];
    GstElement *asrc[NR_PROG];
    GstElement *vparse[NR_PROG];
    GstElement *vdemux[NR_PROG];
    GstElement *aparse[NR_PROG];
    GstPad *tl_pad, *pad;
    GstStructure *pm;
    GstBus *bus;
    gchar *pad_name;

    FILE * xml_of;

    gchar vname[][60] = {
	"/Users/lyang/src/res/mpts.test/mpts110.mpv",
	"/Users/lyang/src/res/mpts.test/mpts120.mpv",
	"/Users/lyang/src/res/mpts.test/mpts130.mpv",
	"/Users/lyang/src/res/mpts.test/mpts140.mpv",
	"/Users/lyang/src/res/mpts.test/mpts150.mpv",
	"/Users/lyang/src/res/mpts.test/mpts160.mpv",
	"/Users/lyang/src/res/mpts.test/mpts170.mpv"
    };
    gchar aname[][60] = {
	"/Users/lyang/src/res/mpts.test/mpts113.mpa",
	"/Users/lyang/src/res/mpts.test/mpts123.mpa",
	"/Users/lyang/src/res/mpts.test/mpts133.mpa",
	"/Users/lyang/src/res/mpts.test/mpts143.mpa",
	"/Users/lyang/src/res/mpts.test/mpts153.mpa",
	"/Users/lyang/src/res/mpts.test/mpts163.mpa",
	"/Users/lyang/src/res/mpts.test/mpts173.mpa"
    };
    gchar dest_dir[60];
    gchar dest_xml[60];

    gint i;

    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    pipeline = gst_pipeline_new ("mpeg-ts-muxer");
    mux = gst_element_factory_make ("mpegtsmux", "muxer");
    sink = gst_element_factory_make ("filesink", "sink");
    if (!pipeline || !mux || !sink) {
	g_printerr ( "Some element could not be created.\n");
	return -1;
    }

    for (i = 0; i < NR_PROG; i++) {
	vsrc[i] = gst_element_factory_make ("filesrc", NULL);
	vdemux[i] = gst_element_factory_make ("mpegpsdemux", NULL);
	vparse[i] = gst_element_factory_make ("mpegvideoparse", NULL);

	asrc[i] = gst_element_factory_make ("filesrc", NULL);
	aparse[i] = gst_element_factory_make ("mp3parse", NULL);

	if (!vsrc[i] || !vparse[i] || !vdemux[i] || !asrc[i] || !aparse[i]) {
	    g_printerr ( "Some element could not be created. i=%d.\n", i);
	    return -1;
	}
    }

    /* Setting paths */
    for (i = 0; i < NR_PROG; i++) {
	g_object_set (G_OBJECT (vsrc[i]), "location", vname[i], NULL);
	g_object_set (G_OBJECT (asrc[i]), "location", aname[i], NULL);
    }

    /* snprintf bounds the write to the fixed-size buffer (sprintf did not) */
    snprintf (dest_dir, sizeof (dest_dir),
	    "/Users/lyang/src/res/mpts.test/mpts_%02d.ts", NR_PROG);
    g_object_set (G_OBJECT (sink), "location", dest_dir, NULL);

    /* construct the pipeline */
    gst_bin_add_many (GST_BIN (pipeline), mux, sink, NULL);
    gst_element_link (mux, sink);
    for (i = 0; i < NR_PROG; i++) {
	gst_bin_add_many (GST_BIN (pipeline), vsrc[i], vdemux[i], vparse[i],
		NULL);
	gst_element_link (vsrc[i], vdemux[i]);

	/* demuxer pads appear dynamically; link vparse when they do */
	g_signal_connect (vdemux[i], "pad-added", G_CALLBACK (on_pad_added),
		vparse[i]);

	gst_bin_add_many (GST_BIN (pipeline), asrc[i], aparse[i], NULL);
	gst_element_link (asrc[i], aparse[i]);
    }

    /* construct the program map: mux pad name -> program number */
    pm = gst_structure_empty_new ("program_map");

    for (i = 0; i < NR_PROG; i++) {
	/* vparse <-> mux */
	tl_pad = gst_element_get_static_pad (vparse[i], "src");
	if (tl_pad == NULL) {
	    g_printerr ("vparse[%d] src pad getting failed.\n", i);
	    return -1;
	}
	pad = gst_element_get_compatible_pad (mux, tl_pad, NULL);
	if (pad == NULL) {
	    g_printerr ("No compatible mux pad for vparse[%d].\n", i);
	    gst_object_unref (GST_OBJECT (tl_pad));
	    return -1;
	}
	gst_pad_link (tl_pad, pad);
	pad_name = gst_pad_get_name (pad);
	gst_structure_set (pm, pad_name, G_TYPE_INT, i, NULL);
	g_free (pad_name);      /* gst_pad_get_name() returns a copy */
	gst_object_unref (GST_OBJECT (tl_pad));
	gst_object_unref (GST_OBJECT (pad));

	/* aparse <-> mux */
	tl_pad = gst_element_get_static_pad (aparse[i], "src");
	if (tl_pad == NULL) {
	    g_printerr ("aparse[%d] src pad getting failed.\n", i);
	    return -1;
	}
	pad = gst_element_get_compatible_pad (mux, tl_pad, NULL);
	if (pad == NULL) {
	    g_printerr ("No compatible mux pad for aparse[%d].\n", i);
	    gst_object_unref (GST_OBJECT (tl_pad));
	    return -1;
	}
	gst_pad_link (tl_pad, pad);
	pad_name = gst_pad_get_name (pad);
	gst_structure_set (pm, pad_name, G_TYPE_INT, i, NULL);
	g_free (pad_name);
	gst_object_unref (GST_OBJECT (tl_pad));
	gst_object_unref (GST_OBJECT (pad));
    }

    /* set the program map; the boxed property setter copies it */
    g_object_set (G_OBJECT (mux), "prog-map", pm, NULL);
    gst_structure_free (pm);

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* Write the pipeline to XML; don't crash if the file can't be opened */
    snprintf (dest_xml, sizeof (dest_xml),
	    "/Users/lyang/src/res/mpts.test/mpts_%02d.xml", NR_PROG);
    xml_of = fopen (dest_xml, "w");
    if (xml_of != NULL) {
	gst_xml_write_file (GST_ELEMENT (pipeline), xml_of);
	fclose (xml_of);
    } else {
	g_printerr ("Could not open %s for writing.\n", dest_xml);
    }

    g_print ("Now playing: %s\n", dest_dir);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Run! */
    g_print ("Running...\n");
    g_main_loop_run (loop);

    /* Out of the main loop, clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);

    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}
Exemple #18
0
/**
 * This helper function is used to construct a custom event to send back
 * upstream when the sink receives the EOS event.  In tunnelled mode,
 * in some cases the upstream component (such as the video/audio decoder)
 * needs to perform some cleanup (such as sending EOS to OMX), which should
 * not happen until the GST layer sink receives the EOS.  The video/audio
 * decoder should not assume that when it receives the EOS, that the sink
 * has also received the EOS, since there may be multiple levels of queuing
 * between the decoder and the sink (ie. in a GstQueue element, and also in
 * the GstBaseSink class).
 */
GstEvent *
gst_goo_event_new_reverse_eos (void)
{
	return gst_event_new_custom ( GST_EVENT_CUSTOM_UPSTREAM,
			gst_structure_empty_new ("GstGooReverseEosEvent") );
}
Exemple #19
0
/* Callback for uridecodebin's "notify::source": configures the newly
 * created source element -- relaxes SSL checking, applies the configured
 * user agent, and parses m_extra_headers ("name=value&name=value...") into
 * the "extra-headers" structure.
 * Fix vs. original: the per-header GValue is now g_value_unset() after
 * gst_structure_set_value() (which copies it); previously the string held
 * by the GValue leaked on every header. */
void eServiceMP3Record::handleUridecNotifySource(GObject *object, GParamSpec *unused, gpointer user_data)
{
	GstElement *source = NULL;
	eServiceMP3Record *_this = (eServiceMP3Record*)user_data;
	g_object_get(object, "source", &source, NULL);
	if (source)
	{
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "ssl-strict") != 0)
		{
			g_object_set(G_OBJECT(source), "ssl-strict", FALSE, NULL);
		}
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "user-agent") != 0 && !_this->m_useragent.empty())
		{
			g_object_set(G_OBJECT(source), "user-agent", _this->m_useragent.c_str(), NULL);
		}
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0 && !_this->m_extra_headers.empty())
		{
#if GST_VERSION_MAJOR < 1
			GstStructure *extras = gst_structure_empty_new("extras");
#else
			GstStructure *extras = gst_structure_new_empty("extras");
#endif
			size_t pos = 0;
			while (pos != std::string::npos)
			{
				std::string name, value;
				size_t start = pos;
				size_t len = std::string::npos;
				/* headers are encoded as name=value pairs joined by '&' */
				pos = _this->m_extra_headers.find('=', pos);
				if (pos != std::string::npos)
				{
					len = pos - start;
					pos++;
					name = _this->m_extra_headers.substr(start, len);
					start = pos;
					len = std::string::npos;
					pos = _this->m_extra_headers.find('&', pos);
					if (pos != std::string::npos)
					{
						len = pos - start;
						pos++;
					}
					value = _this->m_extra_headers.substr(start, len);
				}
				if (!name.empty() && !value.empty())
				{
					GValue header;
					eDebug("[eServiceMP3Record] handleUridecNotifySource setting extra-header '%s:%s'", name.c_str(), value.c_str());
					memset(&header, 0, sizeof(GValue));
					g_value_init(&header, G_TYPE_STRING);
					g_value_set_string(&header, value.c_str());
					gst_structure_set_value(extras, name.c_str(), &header);
					/* gst_structure_set_value() copies: release our copy */
					g_value_unset(&header);
				}
				else
				{
					eDebug("[eServiceMP3Record] handleUridecNotifySource invalid header format %s", _this->m_extra_headers.c_str());
					break;
				}
			}
			if (gst_structure_n_fields(extras) > 0)
			{
				g_object_set(G_OBJECT(source), "extra-headers", extras, NULL);
			}
			gst_structure_free(extras);
		}
		gst_object_unref(source);
	}
}
Exemple #20
0
/*
 * Enumerate the selected dc1394 camera's supported video modes and build a
 * GstCaps describing them.  Fixed modes get one structure each; format-7
 * (scalable) modes get one structure per supported color coding, with a
 * size range and an unbounded framerate range.
 *
 * Returns a newly allocated GstCaps on success (caller owns it), or NULL
 * on error (after posting a GST_ELEMENT_ERROR).
 */
GstCaps *
gst_dc1394_get_cam_caps (GstDc1394 * src)
{

    dc1394camera_t *camera = NULL;
    dc1394camera_list_t *cameras = NULL;
    dc1394error_t camerr;
    gint i, j;
    dc1394video_modes_t modes;
    dc1394framerates_t framerates;
    GstCaps *gcaps = NULL;

    gcaps = gst_caps_new_empty ();

    camerr = dc1394_camera_enumerate (src->dc1394, &cameras);

    if (camerr != DC1394_SUCCESS || cameras == NULL) {
        GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND,
                           ("Can't find cameras error : %d", camerr),
                           ("Can't find cameras error : %d", camerr));
        goto error;
    }

    if (cameras->num == 0) {
        GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, ("There were no cameras"),
                           ("There were no cameras"));
        goto error;
    }

    if (src->camnum > (cameras->num - 1)) {
        GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Invalid camera number"),
                           ("Invalid camera number %d", src->camnum));
        goto error;
    }

    camera =
        dc1394_camera_new_unit (src->dc1394, cameras->ids[src->camnum].guid,
                                cameras->ids[src->camnum].unit);

    dc1394_camera_free_list (cameras);
    cameras = NULL;

    camerr = dc1394_video_get_supported_modes (camera, &modes);
    if (camerr != DC1394_SUCCESS) {
        GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Error getting supported modes"),
                           ("Error getting supported modes"));
        goto error;
    }

    for (i = modes.num - 1; i >= 0; i--) {
        int m = modes.modes[i];

        if (m < DC1394_VIDEO_MODE_EXIF) {
            /* Fixed-size video mode: one caps structure per mode. */
            GstStructure *gs = gst_structure_empty_new ("video");

            gst_structure_set (gs, "vmode", G_TYPE_INT, m, NULL);

            if (gst_dc1394_caps_set_format_vmode_caps (gs, m) < 0) {
                /* gs was never appended to gcaps, so free it here to
                 * avoid leaking it on the error path. */
                gst_structure_free (gs);
                GST_ELEMENT_ERROR (src, STREAM, FAILED,
                                   ("attempt to set mode to %d failed", m),
                                   ("attempt to set mode to %d failed", m));
                goto error;
            } else {
                /* NOTE(review): camerr is not checked here; on failure
                 * framerates may be uninitialized — confirm against the
                 * libdc1394 contract before relying on it. */
                camerr = dc1394_video_get_supported_framerates (camera, m, &framerates);
                gst_dc1394_caps_set_framerate_list (gs, &framerates);
                /* gcaps takes ownership of gs. */
                gst_caps_append_structure (gcaps, gs);

            }
        } else {
            /* FORMAT 7: scalable mode; sizes are a range and each supported
             * color coding yields its own caps structure. */
            guint maxx, maxy;
            GstStructure *gs = gst_structure_empty_new ("video");
            dc1394color_codings_t colormodes;
            guint xunit, yunit;

            gst_structure_set (gs, "vmode", G_TYPE_INT, m, NULL);

            /* Get the maximum frame size */
            camerr = dc1394_format7_get_max_image_size (camera, m, &maxx, &maxy);
            if (camerr != DC1394_SUCCESS) {
                gst_structure_free (gs);
                GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
                                   ("Error getting format 7 max image size"),
                                   ("Error getting format 7 max image size"));
                goto error;
            }
            GST_LOG_OBJECT (src, "Format 7 maxx=%d maxy=%d", maxx, maxy);

            camerr = dc1394_format7_get_unit_size (camera, m, &xunit, &yunit);
            if (camerr != DC1394_SUCCESS) {
                gst_structure_free (gs);
                GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
                                   ("Error getting format 7 image unit size"),
                                   ("Error getting format 7 image unit size"));
                goto error;
            }
            GST_LOG_OBJECT (src, "Format 7 unitx=%d unity=%d", xunit, yunit);

            gst_dc1394_set_caps_framesize_range (gs, xunit, maxx, xunit,
                                                 yunit, maxy, yunit);

            /* note that format 7 has no concept of a framerate, so we pass the
             * full range */
            gst_structure_set (gs,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);

            /* get the available color codings */
            camerr = dc1394_format7_get_color_codings (camera, m, &colormodes);
            if (camerr != DC1394_SUCCESS) {
                gst_structure_free (gs);
                GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
                                   ("Error getting format 7 color modes"),
                                   ("Error getting format 7 color modes"));
                goto error;
            }

            for (j = 0; j < colormodes.num; j++) {
                GstStructure *newgs = gst_structure_copy (gs);

                gst_dc1394_set_caps_color (newgs, colormodes.codings[j]);
                GST_LOG_OBJECT (src, "Format 7 colormode set : %d",
                                colormodes.codings[j]);
                /* note that since there are multiple color modes, we append
                 * multiple structures (copies of gs). */
                gst_caps_append_structure (gcaps, newgs);
            }
            /* Only copies of gs were appended; free the template so it is
             * not leaked once per format-7 mode. */
            gst_structure_free (gs);
        }
    }

    if (camera) {
        dc1394_camera_free (camera);
    }

    return gcaps;

error:

    if (gcaps) {
        gst_caps_unref (gcaps);
    }

    if (cameras) {
        dc1394_camera_free_list (cameras);
        cameras = NULL;
    }

    if (camera) {
        dc1394_camera_free (camera);
        camera = NULL;
    }

    return NULL;
}
Exemple #21
0
/*
 * Convert a PPayloadInfo into an "application/x-rtp" GstStructure suitable
 * for an RTP caps description.  Required fields: "payload" (info.id must
 * not be -1) and, for dynamic payload types (id >= 96), a non-empty name.
 * Optional fields are added when present: clock-rate, encoding-params
 * (channel count as a string), and each extra parameter.  THEORA/VORBIS
 * "configuration" parameters are converted from hex to base64.
 *
 * Returns a newly allocated GstStructure (caller frees), or 0 when a
 * required field is missing or the configuration hex string is invalid.
 */
GstStructure *payloadInfoToStructure(const PPayloadInfo &info, const QString &media)
{
	GstStructure *out = gst_structure_empty_new("application/x-rtp");

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, media.toLatin1().data());
		gst_structure_set_value(out, "media", &gv);
		/* gst_structure_set_value() copies; unset to avoid leaking the
		 * GValue's own string copy. Same applies below. */
		g_value_unset(&gv);
	}

	// payload id field required
	if(info.id == -1)
	{
		gst_structure_free(out);
		return 0;
	}

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_INT);
		g_value_set_int(&gv, info.id);
		gst_structure_set_value(out, "payload", &gv);
		g_value_unset(&gv);
	}

	// name required for payload values 96 or greater
	if(info.id >= 96 && info.name.isEmpty())
	{
		gst_structure_free(out);
		return 0;
	}

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, info.name.toLatin1().data());
		gst_structure_set_value(out, "encoding-name", &gv);
		g_value_unset(&gv);
	}

	if(info.clockrate != -1)
	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_INT);
		g_value_set_int(&gv, info.clockrate);
		gst_structure_set_value(out, "clock-rate", &gv);
		g_value_unset(&gv);
	}

	if(info.channels != -1)
	{
		// encoding-params carries the channel count as a string
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, QString::number(info.channels).toLatin1().data());
		gst_structure_set_value(out, "encoding-params", &gv);
		g_value_unset(&gv);
	}

	foreach(const PPayloadInfo::Parameter &i, info.parameters)
	{
		QString value = i.value;

		// FIXME: is there a better way to detect when we should do this conversion?
		if(i.name == "configuration" && (info.name.toUpper() == "THEORA" || info.name.toUpper() == "VORBIS"))
		{
			QByteArray config = hexDecode(value);
			if(config.isEmpty())
			{
				gst_structure_free(out);
				return 0;
			}

			value = QString::fromLatin1(config.toBase64());
		}

		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, value.toLatin1().data());
		gst_structure_set_value(out, i.name.toLatin1().data(), &gv);
		g_value_unset(&gv);
	}

	return out;
}