Example #1
static GstCaps *
generate_sink_template (void)
{
    GstCaps *caps;
    GstStructure *struc;

    caps = gst_caps_new_empty ();

    struc = gst_structure_new ("video/x-raw-yuv",
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30, 1,
                               NULL);

    {
        GValue list = { 0 };
        GValue val = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&val, GST_TYPE_FOURCC);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('I', '4', '2', '0'));
        gst_value_list_append_value (&list, &val);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'));
        gst_value_list_append_value (&list, &val);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'));
        gst_value_list_append_value (&list, &val);

        gst_structure_set_value (struc, "format", &list);

        g_value_unset (&val);
        g_value_unset (&list);
    }

    gst_caps_append_structure (caps, struc);

    return caps;
}
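A helper like this is usually wired into a static pad template at class-init time. A minimal sketch, assuming a 0.10-era element to match the video/x-raw-yuv and fourcc usage above (my_element_base_init is a hypothetical name):

static void
my_element_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  /* in 0.10, gst_pad_template_new takes ownership of the caps */
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          generate_sink_template ()));
}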
Example #2
#ifdef __SYMBIAN32__
EXPORT_C
#endif

void
gst_mixer_volume_changed (GstMixer * mixer,
    GstMixerTrack * track, gint * volumes)
{
  GstStructure *s;
  GstMessage *m;
  GValue l = { 0, };
  GValue v = { 0, };
  gint i;

  g_return_if_fail (mixer != NULL);
  g_return_if_fail (GST_IS_ELEMENT (mixer));
  g_return_if_fail (track != NULL);

  s = gst_structure_new (GST_MIXER_MESSAGE_NAME,
      "type", G_TYPE_STRING, "volume-changed",
      "track", GST_TYPE_MIXER_TRACK, track, NULL);

  g_value_init (&l, GST_TYPE_ARRAY);

  g_value_init (&v, G_TYPE_INT);

  /* FIXME 0.11: pass track->num_channels to the function */
  for (i = 0; i < track->num_channels; ++i) {
    g_value_set_int (&v, volumes[i]);
    gst_value_array_append_value (&l, &v);
  }
  g_value_unset (&v);

  gst_structure_set_value (s, "volumes", &l);
  g_value_unset (&l);

  m = gst_message_new_element (GST_OBJECT (mixer), s);
  if (gst_element_post_message (GST_ELEMENT (mixer), m) == FALSE) {
    GST_WARNING ("This element has no bus, therefore no message sent!");
  }
}
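On the bus side, the message can be unpacked again with the structure getters; a sketch (handle_mixer_volumes is a hypothetical helper, not part of the original):

static void
handle_mixer_volumes (GstMessage * m)
{
  const GstStructure *s = gst_message_get_structure (m);
  const GValue *l;
  guint i;

  if (g_strcmp0 (gst_structure_get_string (s, "type"), "volume-changed") != 0)
    return;

  l = gst_structure_get_value (s, "volumes");
  for (i = 0; i < gst_value_array_get_size (l); i++)
    g_print ("channel %u: volume %d\n", i,
        g_value_get_int (gst_value_array_get_value (l, i)));
}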
Example #3
static void
kms_base_rtp_session_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  KmsBaseRtpSession *self = KMS_BASE_RTP_SESSION (object);

  KMS_SDP_SESSION_LOCK (self);

  switch (property_id) {
    case PROP_CONNECTION_STATE:
      g_value_set_enum (value, self->conn_state);
      break;
    case PROP_STATS:{
      gchar *struct_name, *obj_name;
      GstStructure *s;

      obj_name = gst_element_get_name (self);
      struct_name = g_strdup_printf ("%s-stats", obj_name);
      g_free (obj_name);

      /* Video and audio latencies are very small values in   */
      /* nanoseconds, so there is no harm in casting them to  */
      /* uint64 even if we might lose a bit of precision.     */

      s = gst_structure_new (struct_name, "video-e2e-latency",
          G_TYPE_UINT64, (guint64) self->stats->vi, "audio-e2e-latency",
          G_TYPE_UINT64, (guint64) self->stats->ai, NULL);

      g_free (struct_name);
      g_value_take_boxed (value, s);

      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }

  KMS_SDP_SESSION_UNLOCK (self);
}
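Because the stats are boxed, the g_object_get caller receives its own copy of the GstStructure and must free it. A consuming-side sketch (the "stats" property name is assumed from PROP_STATS):

static guint64
get_video_e2e_latency (KmsBaseRtpSession * session)
{
  GstStructure *stats = NULL;
  guint64 latency = 0;

  g_object_get (session, "stats", &stats, NULL);
  if (stats != NULL) {
    gst_structure_get_uint64 (stats, "video-e2e-latency", &latency);
    gst_structure_free (stats);
  }
  return latency;
}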
Example #4
static void
_local_candidates_prepared (FsStreamTransmitter *stream_transmitter,
    gpointer user_data)
{
  FsRtpStream *self = FS_RTP_STREAM (user_data);
  GstElement *conf = NULL;
  FsRtpSession *session = fs_rtp_stream_get_session (self, NULL);

  if (!session)
    return;

  g_object_get (session, "conference", &conf, NULL);

  gst_element_post_message (conf,
      gst_message_new_element (GST_OBJECT (conf),
          gst_structure_new ("farstream-local-candidates-prepared",
              "stream", FS_TYPE_STREAM, self,
              NULL)));

  gst_object_unref (conf);
  g_object_unref (session);
}
Example #5
static GstStructure *
get_wave_buffer (BtWavetable * self, guint wave_ix, guint wave_level_ix)
{
  BtWave *wave;
  BtWavelevel *wavelevel;
  GstStructure *s = NULL;

  if ((wave = bt_wavetable_get_wave_by_index (self, wave_ix))) {
    if ((wavelevel = bt_wave_get_level_by_index (wave, wave_level_ix))) {
      GstBuffer *buffer = NULL;
      gpointer data;
      gulong length;
      guint channels;
      GstBtNote root_note;
      gsize size;

      g_object_get (wave, "channels", &channels, NULL);
      g_object_get (wavelevel, "data", &data, "length", &length, "root-note",
          &root_note, NULL);

      size = channels * length * sizeof (gint16);
      buffer =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, data, size, 0,
          size, NULL, NULL);

      s = gst_structure_new ("audio/x-raw",     // unused
          "format", G_TYPE_STRING, GST_AUDIO_NE (S16),  // unused
          "layout", G_TYPE_STRING, "interleaved",       // unused
          "rate", G_TYPE_INT, 44100,    // unused
          "channels", G_TYPE_INT, channels,
          "root-note", GSTBT_TYPE_NOTE, (guint) root_note,
          "buffer", GST_TYPE_BUFFER, buffer, NULL);

      g_object_unref (wavelevel);
    }
    g_object_unref (wave);
  }
  return s;
}
Example #6
static GstCaps *
generate_src_template (void)
{
  GstCaps *caps;

  GstStructure *struc;

  caps = gst_caps_new_empty ();

  struc = gst_structure_new ("audio/mpeg",
      "mpegversion", G_TYPE_INT, 4,
      "rate", GST_TYPE_INT_RANGE, 8000, 96000,
      "channels", GST_TYPE_INT_RANGE, 1, 6, NULL);

  {
    GValue list;
    GValue val;

    list.g_type = val.g_type = 0;

    g_value_init (&list, GST_TYPE_LIST);
    g_value_init (&val, G_TYPE_INT);

    g_value_set_int (&val, 2);
    gst_value_list_append_value (&list, &val);

    g_value_set_int (&val, 4);
    gst_value_list_append_value (&list, &val);

    gst_structure_set_value (struc, "mpegversion", &list);

    g_value_unset (&val);
    g_value_unset (&list);
  }

  gst_caps_append_structure (caps, struc);

  return caps;
}
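Note that gst_structure_set_value replaces the fixed mpegversion=4 written by gst_structure_new, so the final template advertises both versions. A quick debugging sketch to make that visible:

{
  GstCaps *caps = generate_src_template ();
  gchar *str = gst_caps_to_string (caps);

  /* prints something like:
   * audio/mpeg, mpegversion=(int){ 2, 4 }, rate=(int)[ 8000, 96000 ],
   * channels=(int)[ 1, 6 ] */
  g_print ("%s\n", str);

  g_free (str);
  gst_caps_unref (caps);
}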
Example #7
#ifdef __SYMBIAN32__
EXPORT_C
#endif

void
gst_mixer_mute_toggled (GstMixer * mixer, GstMixerTrack * track, gboolean mute)
{
  GstStructure *s;
  GstMessage *m;

  g_return_if_fail (mixer != NULL);
  g_return_if_fail (GST_IS_ELEMENT (mixer));
  g_return_if_fail (track != NULL);

  s = gst_structure_new (GST_MIXER_MESSAGE_NAME,
      "type", G_TYPE_STRING, "mute-toggled",
      "track", GST_TYPE_MIXER_TRACK, track, "mute", G_TYPE_BOOLEAN, mute, NULL);

  m = gst_message_new_element (GST_OBJECT (mixer), s);
  if (gst_element_post_message (GST_ELEMENT (mixer), m) == FALSE) {
    GST_WARNING ("This element has no bus, therefore no message sent!");
  }
}
Example #8
GstCaps * _owr_payload_create_encoded_caps(OwrPayload *payload)
{
    GstCaps *caps = NULL;

    g_return_val_if_fail(OWR_IS_PAYLOAD(payload), NULL);

    switch (payload->priv->codec_type) {
    case OWR_CODEC_TYPE_H264:
        caps = gst_caps_new_simple("video/x-h264",
            "profile", G_TYPE_STRING, "baseline",
            NULL);
        caps = gst_caps_merge_structure(caps, gst_structure_new("video/x-h264",
            "profile", G_TYPE_STRING, "constrained-baseline", NULL));
        break;
    case OWR_CODEC_TYPE_VP8:
        caps = gst_caps_new_empty_simple("video/x-vp8");
        break;
    default:
        caps = gst_caps_new_any();
    }

    return caps;
}
Example #9
static GValueArray *
jingle_create_relay_info(const gchar *ip, guint port, const gchar *username,
	const gchar *password, const gchar *relay_type, GValueArray *relay_info)
{
	GValue value;
	GstStructure *turn_setup = gst_structure_new("relay-info",
		"ip", G_TYPE_STRING, ip, 
		"port", G_TYPE_UINT, port,
		"username", G_TYPE_STRING, username,
		"password", G_TYPE_STRING, password,
		"relay-type", G_TYPE_STRING, relay_type,
		NULL);
	purple_debug_info("jabber", "created gst_structure %" GST_PTR_FORMAT "\n", 
		turn_setup);
	if (turn_setup) {
		memset(&value, 0, sizeof(GValue));
		g_value_init(&value, GST_TYPE_STRUCTURE);
		gst_value_set_structure(&value, turn_setup);
		relay_info = g_value_array_append(relay_info, &value);
		gst_structure_free(turn_setup);
	}
	return relay_info;
}
Example #10
static GstFlowReturn
gst_pnm_src_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstPNMSrc *src;
  GstMessage *m;
  gchar *url;

  src = GST_PNM_SRC (psrc);

  if (src->location == NULL)
    return GST_FLOW_ERROR;
  url = g_strdup_printf ("rtsp%s", &src->location[3]);

  /* the only thing we do is redirect to an RTSP url */
  m = gst_message_new_element (GST_OBJECT_CAST (src),
      gst_structure_new ("redirect", "new-location", G_TYPE_STRING, url, NULL));
  g_free (url);

  gst_element_post_message (GST_ELEMENT_CAST (src), m);


  return GST_FLOW_UNEXPECTED;
}
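An application catches the redirect through a bus watch; a sketch (an assumed handler for the bus "message::element" signal, not part of the original):

static void
on_element_message (GstBus * bus, GstMessage * m, gpointer user_data)
{
  const GstStructure *s = gst_message_get_structure (m);

  if (s != NULL && gst_structure_has_name (s, "redirect")) {
    const gchar *location = gst_structure_get_string (s, "new-location");

    g_print ("redirected to %s\n", location);
  }
}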
Example #11
static void
gst_dvbsrc_output_frontend_stats (GstDvbSrc * src)
{
    fe_status_t status;
    uint16_t snr, _signal;
    uint32_t ber, uncorrected_blocks;
    GstMessage *message;
    GstStructure *structure;
    int fe_fd = src->fd_frontend;

    ioctl (fe_fd, FE_READ_STATUS, &status);
    ioctl (fe_fd, FE_READ_SIGNAL_STRENGTH, &_signal);
    ioctl (fe_fd, FE_READ_SNR, &snr);
    ioctl (fe_fd, FE_READ_BER, &ber);
    ioctl (fe_fd, FE_READ_UNCORRECTED_BLOCKS, &uncorrected_blocks);

    structure = gst_structure_new ("dvb-frontend-stats", "status", G_TYPE_INT,
                                   status, "signal", G_TYPE_INT, _signal, "snr", G_TYPE_INT, snr,
                                   "ber", G_TYPE_INT, ber, "unc", G_TYPE_INT, uncorrected_blocks,
                                   "lock", G_TYPE_BOOLEAN, status & FE_HAS_LOCK, NULL);
    message = gst_message_new_element (GST_OBJECT (src), structure);
    gst_element_post_message (GST_ELEMENT (src), message);
}
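The typed getters recover the stats on the receiving side; a small sketch (an assumption, with field names taken from the structure above):

static void
print_frontend_stats (const GstStructure * s)
{
  gint snr = 0, ber = 0;
  gboolean lock = FALSE;

  gst_structure_get_int (s, "snr", &snr);
  gst_structure_get_int (s, "ber", &ber);
  gst_structure_get_boolean (s, "lock", &lock);
  g_print ("snr=%d ber=%d lock=%d\n", snr, ber, lock);
}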
Example #12
static GstMessage *
gst_level_message_new (GstLevel * level, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (level);
  GstStructure *s;
  GValue v = { 0, };
  GstClockTime endtime, running_time, stream_time;

  running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  /* endtime is for backwards compatibility */
  endtime = stream_time + duration;

  s = gst_structure_new ("level",
      "endtime", GST_TYPE_CLOCK_TIME, endtime,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);

  g_value_init (&v, G_TYPE_VALUE_ARRAY);
  g_value_take_boxed (&v, g_value_array_new (0));
  gst_structure_take_value (s, "rms", &v);

  g_value_init (&v, G_TYPE_VALUE_ARRAY);
  g_value_take_boxed (&v, g_value_array_new (0));
  gst_structure_take_value (s, "peak", &v);

  g_value_init (&v, G_TYPE_VALUE_ARRAY);
  g_value_take_boxed (&v, g_value_array_new (0));
  gst_structure_take_value (s, "decay", &v);

  return gst_message_new_element (GST_OBJECT (level), s);
}
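An application unpacks the per-channel arrays again on the bus side; a sketch assuming the usual level-message convention of one double per channel:

static void
print_rms (const GstStructure * s)
{
  const GValue *array_val = gst_structure_get_value (s, "rms");
  GValueArray *rms = (GValueArray *) g_value_get_boxed (array_val);
  guint i;

  for (i = 0; i < rms->n_values; i++)
    g_print ("channel %u rms: %f dB\n", i,
        g_value_get_double (g_value_array_get_nth (rms, i)));
}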
Example #13
static void
gst_multi_file_sink_post_message (GstMultiFileSink * multifilesink,
                                  GstBuffer * buffer, const char *filename)
{
    if (multifilesink->post_messages) {
        GstClockTime duration, timestamp;
        GstClockTime running_time, stream_time;
        guint64 offset, offset_end;
        GstStructure *s;
        GstSegment *segment;
        GstFormat format;

        segment = &GST_BASE_SINK (multifilesink)->segment;
        format = segment->format;

        timestamp = GST_BUFFER_TIMESTAMP (buffer);
        duration = GST_BUFFER_DURATION (buffer);
        offset = GST_BUFFER_OFFSET (buffer);
        offset_end = GST_BUFFER_OFFSET_END (buffer);

        running_time = gst_segment_to_running_time (segment, format, timestamp);
        stream_time = gst_segment_to_stream_time (segment, format, timestamp);

        s = gst_structure_new ("GstMultiFileSink",
                               "filename", G_TYPE_STRING, filename,
                               "index", G_TYPE_INT, multifilesink->index,
                               "timestamp", G_TYPE_UINT64, timestamp,
                               "stream-time", G_TYPE_UINT64, stream_time,
                               "running-time", G_TYPE_UINT64, running_time,
                               "duration", G_TYPE_UINT64, duration,
                               "offset", G_TYPE_UINT64, offset,
                               "offset-end", G_TYPE_UINT64, offset_end, NULL);

        gst_element_post_message (GST_ELEMENT_CAST (multifilesink),
                                  gst_message_new_element (GST_OBJECT_CAST (multifilesink), s));
    }
}
Example #14
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  gint i;
  guint32 formats[] = {
    GST_MAKE_FOURCC ('I', '4', '2', '0'),
    GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
    GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
        /*
           GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
           GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
         */
  };

  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
  }

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_set_details_simple (element_class, "SDL video sink",
      "Sink/Video", "An SDL-based videosink",
      "Ronald Bultje <*****@*****.**>, "
      "Edgard Lima <*****@*****.**>, "
      "Jan Schmidt <*****@*****.**>");
}
Example #15
bool mmsGstSendKeyRelease(GstElement *pipeline, MMSKeySymbol key) {
	if (!pipeline)
		return false;

	// if keysym string is empty, do nothing but return success
	const char *ks = convertMMSKeySymbolToXKeysymString(key);
	if (!*ks)
		return true;

	// construct event
	GstStructure *structure =
		gst_structure_new(	"application/x-gst-navigation",
							"event",	G_TYPE_STRING,	"key-release",
							"key",		G_TYPE_STRING,	ks,
							NULL);
	if (!structure)
		return false;
	GstEvent *event = gst_event_new_navigation(structure);
	if (!event)
		return false;

	// send event
	return gst_element_send_event(pipeline, event);
}
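The matching key-press helper presumably differs only in the event name; a sketch under that assumption:

bool mmsGstSendKeyPress(GstElement *pipeline, MMSKeySymbol key) {
	if (!pipeline)
		return false;

	// if keysym string is empty, do nothing but return success
	const char *ks = convertMMSKeySymbolToXKeysymString(key);
	if (!*ks)
		return true;

	// construct event ("key-press" is the GstNavigation press event name)
	GstStructure *structure =
		gst_structure_new(	"application/x-gst-navigation",
							"event",	G_TYPE_STRING,	"key-press",
							"key",		G_TYPE_STRING,	ks,
							NULL);
	if (!structure)
		return false;
	GstEvent *event = gst_event_new_navigation(structure);
	if (!event)
		return false;

	// send event
	return gst_element_send_event(pipeline, event);
}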
Example #16
#ifdef __SYMBIAN32__
EXPORT_C
#endif

void
gst_mixer_option_changed (GstMixer * mixer,
    GstMixerOptions * opts, gchar * value)
{
  GstStructure *s;
  GstMessage *m;

  g_return_if_fail (mixer != NULL);
  g_return_if_fail (GST_IS_ELEMENT (mixer));
  g_return_if_fail (opts != NULL);

  s = gst_structure_new (GST_MIXER_MESSAGE_NAME,
      "type", G_TYPE_STRING, "option-changed",
      "options", GST_TYPE_MIXER_OPTIONS, opts,
      "value", G_TYPE_STRING, value, NULL);

  m = gst_message_new_element (GST_OBJECT (mixer), s);
  if (gst_element_post_message (GST_ELEMENT (mixer), m) == FALSE) {
    GST_WARNING ("This element has no bus, therefore no message sent!");
  }
}
Example #17
static GstCaps *
generate_src_template (void)
{
    GstCaps *caps;
    GstStructure *struc;

    caps = gst_caps_new_empty ();

    struc = gst_structure_new ("audio/x-iLBC",
                               NULL);

    {
        GValue list;
        GValue val;

        list.g_type = val.g_type = 0;

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&val, G_TYPE_INT);

        g_value_set_int (&val, 20);
        gst_value_list_append_value (&list, &val);

        g_value_set_int (&val, 30);
        gst_value_list_append_value (&list, &val);

        gst_structure_set_value (struc, "mode", &list);

        g_value_unset (&val);
        g_value_unset (&list);
    }

    gst_caps_append_structure (caps, struc);

    return caps;
}
Example #18
static void
gst_multi_file_sink_post_message_full (GstMultiFileSink * multifilesink,
    GstClockTime timestamp, GstClockTime duration, GstClockTime offset,
    GstClockTime offset_end, GstClockTime running_time,
    GstClockTime stream_time, const char *filename)
{
  GstStructure *s;

  if (!multifilesink->post_messages)
    return;

  s = gst_structure_new ("GstMultiFileSink",
      "filename", G_TYPE_STRING, filename,
      "index", G_TYPE_INT, multifilesink->index,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration,
      "offset", G_TYPE_UINT64, offset,
      "offset-end", G_TYPE_UINT64, offset_end, NULL);

  gst_element_post_message (GST_ELEMENT_CAST (multifilesink),
      gst_message_new_element (GST_OBJECT_CAST (multifilesink), s));
}
Example #19
/**
 * fs_msn_stream_add_remote_candidate:
 */
static gboolean
fs_msn_stream_add_remote_candidates (FsStream *stream, GList *candidates,
                                     GError **error)
{
  FsMsnStream *self = FS_MSN_STREAM (stream);
  FsMsnConference *conference = fs_msn_stream_get_conference (self, error);
  FsMsnConnection *conn = NULL;
  gboolean ret = FALSE;

  if (!conference)
    return FALSE;

  GST_OBJECT_LOCK (conference);
  if (self->priv->connection)
    conn = g_object_ref (self->priv->connection);
  GST_OBJECT_UNLOCK (conference);

  if (conn)
  {
    ret = fs_msn_connection_add_remote_candidates (conn, candidates, error);
    g_object_unref (conn);
  }

  if (ret)
    gst_element_post_message (GST_ELEMENT (conference),
        gst_message_new_element (GST_OBJECT (conference),
            gst_structure_new ("farstream-component-state-changed",
                "stream", FS_TYPE_STREAM, self,
                "component", G_TYPE_UINT, 1,
                "state", FS_TYPE_STREAM_STATE, FS_STREAM_STATE_CONNECTING,
                NULL)));

  gst_object_unref (conference);

  return ret;
}
Example #20
static void
append_relay(GValueArray *relay_info, const gchar *ip, guint port, gchar *type,
	     gchar *username, gchar *password)
{
	GValue value;
	GstStructure *gst_relay_info;

	gst_relay_info = gst_structure_new("relay-info",
			"ip", G_TYPE_STRING, ip,
			"port", G_TYPE_UINT, port,
			"relay-type", G_TYPE_STRING, type,
			"username", G_TYPE_STRING, username,
			"password", G_TYPE_STRING, password,
			NULL);

	if (gst_relay_info) {
		memset(&value, 0, sizeof(GValue));
		g_value_init(&value, GST_TYPE_STRUCTURE);
		gst_value_set_structure(&value, gst_relay_info);

		g_value_array_append(relay_info, &value);
		gst_structure_free(gst_relay_info);
	}
}
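Callers build the array once and hand it to the transmitter; a sketch (the transmitter object and its "relay-info" property are assumptions, and the address and credentials are placeholders):

{
	GValueArray *relay_info = g_value_array_new (1);

	append_relay (relay_info, "192.0.2.1", 3478, "udp", "user", "pass");

	/* stream_transmitter is a hypothetical object exposing "relay-info" */
	g_object_set (stream_transmitter, "relay-info", relay_info, NULL);
	g_value_array_free (relay_info);
}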
Example #21
static GstFlowReturn
pad_chain(GstPad *pad, GstBuffer *buf)
{
	struct obj *self;
	GstFlowReturn ret = GST_FLOW_OK;
	AVCodecContext *ctx;
	AVFrame *frame;
	int got_pic;
	AVPacket pkt;
	int read;

	self = (struct obj *)((GstObject *)pad)->parent;
	ctx = self->av_ctx;

	if (G_UNLIKELY(!self->initialized)) {
		GstCaps *new_caps;
		GstStructure *struc;

		self->initialized = true;
		if (gst_av_codec_open(ctx, self->codec) < 0) {
			ret = GST_FLOW_ERROR;
			goto leave;
		}

		if (self->parse_func)
			self->parse_func(self, buf);

		new_caps = gst_caps_new_empty();

		struc = gst_structure_new("video/x-raw-yuv",
				"width", G_TYPE_INT, ctx->width,
				"height", G_TYPE_INT, ctx->height,
				"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I','4','2','0'),
				NULL);

		if (ctx->time_base.num)
			gst_structure_set(struc,
					"framerate", GST_TYPE_FRACTION,
					ctx->time_base.den, ctx->time_base.num,
					NULL);

		if (ctx->sample_aspect_ratio.num)
			gst_structure_set(struc,
					"pixel-aspect-ratio", GST_TYPE_FRACTION,
					ctx->sample_aspect_ratio.num, ctx->sample_aspect_ratio.den,
					NULL);

		gst_caps_append_structure(new_caps, struc);

		GST_INFO_OBJECT(self, "caps are: %" GST_PTR_FORMAT, new_caps);
		gst_pad_set_caps(self->srcpad, new_caps);
		gst_caps_unref(new_caps);
	}

	av_init_packet(&pkt);
	pkt.data = buf->data;
	pkt.size = buf->size;

	frame = avcodec_alloc_frame();

	read = avcodec_decode_video2(ctx, frame, &got_pic, &pkt);
	if (read < 0) {
		GST_WARNING_OBJECT(self, "error: %i", read);
		goto leave;
	}

	if (got_pic) {
		GstBuffer *out_buf;
		out_buf = convert_frame(self, frame);
		out_buf->timestamp = buf->timestamp;
		out_buf->duration = buf->duration;
		ret = gst_pad_push(self->srcpad, out_buf);
	}

leave:
	gst_buffer_unref(buf);

	return ret;
}
Example #22
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{

  GstMotioncells *filter;

  filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
  if (filter->calculate_motion) {
    double sensitivity;
    int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
        motioncells_count, i;
    int thickness, success, motioncellsidxcnt, numberOfCells,
        motioncellsnumber, cellsOfInterestNumber;
    int mincellsOfInterestNumber, motiondetect;
    char *datafile;
    bool display, changed_datafile, useAlpha;
    gint64 starttime;
    motionmaskcoordrect *motionmaskcoords;
    motioncellidx *motionmaskcellsidx;
    cellscolor motioncellscolor;
    motioncellidx *motioncellsidx;
    g_mutex_lock (filter->propset_mutex);
    buf = gst_buffer_make_writable (buf);
    filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
    if (filter->firstframe) {
      setPrevFrame (filter->cvImage, filter->id);
      filter->firstframe = FALSE;
    }

    sensitivity = filter->sensitivity;
    framerate = filter->framerate;
    gridx = filter->gridx;
    gridy = filter->gridy;
    display = filter->display;
    motionmaskcoord_count = filter->motionmaskcoord_count;
    motionmaskcoords =
        g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count);
    for (i = 0; i < filter->motionmaskcoord_count; i++) {       //divide by 2 because the C++ side uses a Gaussian pyramid
      motionmaskcoords[i].upper_left_x =
          filter->motionmaskcoords[i].upper_left_x / 2;
      motionmaskcoords[i].upper_left_y =
          filter->motionmaskcoords[i].upper_left_y / 2;
      motionmaskcoords[i].lower_right_x =
          filter->motionmaskcoords[i].lower_right_x / 2;
      motionmaskcoords[i].lower_right_y =
          filter->motionmaskcoords[i].lower_right_y / 2;
    }

    motioncellscolor.R_channel_value =
        filter->motioncellscolor->R_channel_value;
    motioncellscolor.G_channel_value =
        filter->motioncellscolor->G_channel_value;
    motioncellscolor.B_channel_value =
        filter->motioncellscolor->B_channel_value;

    if ((filter->changed_gridx || filter->changed_gridy
            || filter->changed_startime)) {
      if ((g_strcmp0 (filter->cur_datafile, NULL) != 0)) {
        GFREE (filter->cur_datafile);
        filter->datafileidx++;
        filter->cur_datafile =
            g_strdup_printf ("%s-%d.%s", filter->basename_datafile,
            filter->datafileidx, filter->datafile_extension);
        filter->changed_datafile = TRUE;
        motion_cells_free_resources (filter->id);
      }
      if (filter->motioncells_count > 0)
        gst_motioncells_update_motion_cells (filter);
      if (filter->motionmaskcells_count > 0)
        gst_motioncells_update_motion_masks (filter);
      filter->changed_gridx = FALSE;
      filter->changed_gridy = FALSE;
      filter->changed_startime = FALSE;
    }
    datafile = g_strdup (filter->cur_datafile);
    filter->cur_buff_timestamp = (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    filter->starttime +=
        (filter->cur_buff_timestamp - filter->prev_buff_timestamp);
    starttime = filter->starttime;
    if (filter->changed_datafile || filter->diff_timestamp < 0)
      filter->diff_timestamp =
          (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    changed_datafile = filter->changed_datafile;
    motionmaskcells_count = filter->motionmaskcells_count;
    motionmaskcellsidx = g_new0 (motioncellidx, filter->motionmaskcells_count);
    for (i = 0; i < filter->motionmaskcells_count; i++) {
      motionmaskcellsidx[i].lineidx = filter->motionmaskcellsidx[i].lineidx;
      motionmaskcellsidx[i].columnidx = filter->motionmaskcellsidx[i].columnidx;
    }
    motioncells_count = filter->motioncells_count;
    motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count);
    for (i = 0; i < filter->motioncells_count; i++) {
      motioncellsidx[i].lineidx = filter->motioncellsidx[i].lineidx;
      motioncellsidx[i].columnidx = filter->motioncellsidx[i].columnidx;
    }
    useAlpha = filter->usealpha;
    thickness = filter->thickness;
    success =
        perform_detection_motion_cells (filter->cvImage, sensitivity, framerate,
        gridx, gridy,
        (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND) -
        filter->diff_timestamp, display, useAlpha, motionmaskcoord_count,
        motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
        motioncellscolor, motioncells_count, motioncellsidx, starttime,
        datafile, changed_datafile, thickness, filter->id);
    if ((success == 1) && (filter->sent_init_error_msg == false)) {
      char *initfailedreason;
      int initerrorcode;
      GstStructure *s;
      GstMessage *m;
      initfailedreason = getInitDataFileFailed (filter->id);
      initerrorcode = getInitErrorCode (filter->id);
      s = gst_structure_new ("motion", "init_error_code", G_TYPE_INT,
          initerrorcode, "details", G_TYPE_STRING, initfailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_init_error_msg = TRUE;
    }
    if ((success == -1) && (filter->sent_save_error_msg == false)) {
      char *savefailedreason;
      int saveerrorcode;
      GstStructure *s;
      GstMessage *m;
      savefailedreason = getSaveDataFileFailed (filter->id);
      saveerrorcode = getSaveErrorCode (filter->id);
      s = gst_structure_new ("motion", "save_error_code", G_TYPE_INT,
          saveerrorcode, "details", G_TYPE_STRING, savefailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_save_error_msg = TRUE;
    }
    if (success == -2) {        //frame dropped
      filter->prev_buff_timestamp = filter->cur_buff_timestamp;
      //free
      GFREE (datafile);
      GFREE (motionmaskcoords);
      GFREE (motionmaskcellsidx);
      GFREE (motioncellsidx);
      g_mutex_unlock (filter->propset_mutex);
      return gst_pad_push (filter->srcpad, buf);
    }
    filter->changed_datafile = getChangedDataFile (filter->id);
    motioncellsidxcnt = getMotionCellsIdxCnt (filter->id);
    numberOfCells = filter->gridx * filter->gridy;
    motioncellsnumber = motioncellsidxcnt / MSGLEN;
    cellsOfInterestNumber = (filter->motioncells_count > 0) ?   //how many cells are of interest to us
        (filter->motioncells_count) : (numberOfCells);
    mincellsOfInterestNumber =
        floor ((double) cellsOfInterestNumber * filter->threshold);
    motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
    if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
      char *detectedmotioncells;
      filter->last_motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
      detectedmotioncells = getMotionCellsIdx (filter->id);
      if (detectedmotioncells) {
        filter->consecutive_motion++;
        if ((filter->previous_motion == false)
            && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
          GstStructure *s;
          GstMessage *m;
          filter->previous_motion = true;
          filter->motion_begin_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion_begin", G_TYPE_UINT64,
              filter->motion_begin_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        } else if (filter->postallmotion) {
          GstStructure *s;
          GstMessage *m;
          filter->motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion", G_TYPE_UINT64,
              filter->motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      } else {
        GstStructure *s;
        GstMessage *m;
        s = gst_structure_new ("motion", "motion_cells_indices", G_TYPE_STRING,
            "error", NULL);
        m = gst_message_new_element (GST_OBJECT (filter), s);
        gst_element_post_message (GST_ELEMENT (filter), m);
      }
    } else {
      filter->consecutive_motion = 0;
      if ((((GST_BUFFER_TIMESTAMP (buf) -
                      filter->last_motion_timestamp) / 1000000000l) >=
              filter->gap)
          && (filter->last_motion_timestamp > 0)) {
        GST_DEBUG ("POST MOTION FINISHED MSG\n");
        if (filter->previous_motion) {
          GstStructure *s;
          GstMessage *m;
          filter->previous_motion = false;
          s = gst_structure_new ("motion", "motion_finished", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    if (filter->postnomotion > 0) {
      guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
      if ((last_buf_timestamp -
              (filter->last_motion_timestamp / 1000000000l)) >=
          filter->postnomotion) {
        GST_DEBUG ("POST NO MOTION MSG\n");
        if ((last_buf_timestamp -
                (filter->last_nomotion_notified / 1000000000l)) >=
            filter->postnomotion) {
          GstStructure *s;
          GstMessage *m;
          filter->last_nomotion_notified = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "no_motion", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    filter->prev_buff_timestamp = filter->cur_buff_timestamp;
    //free
    GFREE (datafile);
    GFREE (motionmaskcoords);
    GFREE (motionmaskcellsidx);
    GFREE (motioncellsidx);

    g_mutex_unlock (filter->propset_mutex);
  }

  return gst_pad_push (filter->srcpad, buf);
}
Example #23
static void
settings_changed_cb (GOmxCore * core)
{
  GstOmxBaseFilter *omx_base;
  GstOmxBaseVideoDec *self;
  guint width;
  guint height;
  guint32 format = 0;

  omx_base = core->object;
  self = GST_OMX_BASE_VIDEODEC (omx_base);

  GST_DEBUG_OBJECT (omx_base, "settings changed");

  {
    OMX_PARAM_PORTDEFINITIONTYPE param;

    G_OMX_INIT_PARAM (param);

    param.nPortIndex = omx_base->out_port->port_index;
    OMX_GetParameter (omx_base->gomx->omx_handle, OMX_IndexParamPortDefinition,
        &param);

    width = param.format.video.nFrameWidth;
    height = param.format.video.nFrameHeight;
    switch ((guint)param.format.video.eColorFormat) {
      case OMX_COLOR_FormatYUV420Planar:
      case OMX_COLOR_FormatYUV420PackedPlanar:
        format = GST_MAKE_FOURCC ('I', '4', '2', '0');
        break;
      case OMX_COLOR_FormatYCbYCr:
        format = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
        break;
      case OMX_COLOR_FormatCbYCrY:
        format = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
        break;
      /* MODIFICATION: Add extended_color_format */
      case OMX_EXT_COLOR_FormatNV12TPhysicalAddress:
        format = GST_MAKE_FOURCC ('S', 'T', '1', '2');
        break;
      case OMX_EXT_COLOR_FormatNV12LPhysicalAddress:
        format = GST_MAKE_FOURCC ('S', 'N', '1', '2');
        break;
      case OMX_COLOR_FormatYUV420SemiPlanar:
        format = GST_MAKE_FOURCC ('N', 'V', '1', '2');
        break;
      default:
        break;
    }
  }

  {
    GstCaps *new_caps;
    GstStructure *struc;

    new_caps = gst_caps_new_empty ();
    struc = gst_structure_new ("video/x-raw-yuv",
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height, "format", GST_TYPE_FOURCC, format, NULL);

    if (self->framerate_denom != 0)
      gst_structure_set (struc, "framerate", GST_TYPE_FRACTION,
          self->framerate_num, self->framerate_denom, NULL);
    else
      /* FIXME this is a workaround for xvimagesink */
      gst_structure_set (struc, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    gst_caps_append_structure (new_caps, struc);

    GST_INFO_OBJECT (omx_base, "caps are: %" GST_PTR_FORMAT, new_caps);
    gst_pad_set_caps (omx_base->srcpad, new_caps);
    gst_caps_unref (new_caps); /* Modification: unref caps */
  }
}
Example #24
/* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
  /* We are possibly in a GStreamer working thread, so we notify the main
   * thread of this event through a message in the bus */
  gst_element_post_message (playbin,
      gst_message_new_application (GST_OBJECT (playbin),
          gst_structure_new ("tags-changed", (gchar *) NULL)));
}
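The corresponding main-thread handler then reacts to the application message; a sketch (assumed to be connected to the bus's "message::application" signal):

static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  if (g_strcmp0 (gst_structure_get_name (gst_message_get_structure (msg)),
          "tags-changed") == 0) {
    /* running in the main thread now, so it is safe to touch the UI */
  }
}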
Example #25
static GstCaps *
gst_openal_helper_probe_caps (ALCcontext * context)
{
  static const struct
  {
    gint count;
    GstAudioChannelPosition positions[8];
  } chans[] = {
    {
      1, {
      GST_AUDIO_CHANNEL_POSITION_MONO}
    }, {
      2, {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}
    }, {
      4, {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
            GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
            GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}
    }, {
      6, {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
            GST_AUDIO_CHANNEL_POSITION_LFE1,
            GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
            GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}
    }, {
      7, {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
            GST_AUDIO_CHANNEL_POSITION_LFE1,
            GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
            GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
            GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
    }, {
      8, {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
            GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
            GST_AUDIO_CHANNEL_POSITION_LFE1,
            GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
            GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
            GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
            GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
  },};
  GstStructure *structure;
  guint64 channel_mask;
  GstCaps *caps;
  ALCcontext *old;

  old = pushContext (context);

  caps = gst_caps_new_empty ();

  if (alIsExtensionPresent ("AL_EXT_MCFORMATS")) {
    const char *fmt32[] = {
      "AL_FORMAT_MONO_FLOAT32",
      "AL_FORMAT_STEREO_FLOAT32",
      "AL_FORMAT_QUAD32",
      "AL_FORMAT_51CHN32",
      "AL_FORMAT_61CHN32",
      "AL_FORMAT_71CHN32",
      NULL
    }, *fmt16[] = {
    "AL_FORMAT_MONO16",
          "AL_FORMAT_STEREO16",
          "AL_FORMAT_QUAD16",
          "AL_FORMAT_51CHN16",
          "AL_FORMAT_61CHN16", "AL_FORMAT_71CHN16", NULL}, *fmt8[] = {
    "AL_FORMAT_MONO8",
          "AL_FORMAT_STEREO8",
          "AL_FORMAT_QUAD8",
          "AL_FORMAT_51CHN8", "AL_FORMAT_61CHN8", "AL_FORMAT_71CHN8", NULL};
    int i;

    if (alIsExtensionPresent ("AL_EXT_FLOAT32")) {
      for (i = 0; fmt32[i]; i++) {
        ALenum value = alGetEnumValue (fmt32[i]);
        if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
          continue;

        structure =
            gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
            GST_AUDIO_NE (F32), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
            OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
        if (chans[i].count > 2) {
          gst_audio_channel_positions_to_mask (chans[i].positions,
              chans[i].count, FALSE, &channel_mask);
          gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
              channel_mask, NULL);
        }
        gst_caps_append_structure (caps, structure);
      }
    }

    for (i = 0; fmt16[i]; i++) {
      ALenum value = alGetEnumValue (fmt16[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          GST_AUDIO_NE (S16), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
    for (i = 0; fmt8[i]; i++) {
      ALenum value = alGetEnumValue (fmt8[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          G_STRINGIFY (U8), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
  } else {
    if (alIsExtensionPresent ("AL_EXT_FLOAT32")) {
      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          GST_AUDIO_NE (F32), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
      gst_caps_append_structure (caps, structure);
    }

    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        GST_AUDIO_NE (S16), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);

    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        G_STRINGIFY (U8), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_double")) {
    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        GST_AUDIO_NE (F64), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_IMA4")) {
    structure =
        gst_structure_new ("audio/x-adpcm", "layout", G_TYPE_STRING, "ima",
        "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE, OPENAL_MAX_RATE,
        "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_ALAW")) {
    structure =
        gst_structure_new ("audio/x-alaw", "rate", GST_TYPE_INT_RANGE,
        OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2,
        NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_MULAW_MCFORMATS")) {
    const char *fmtmulaw[] = {
      "AL_FORMAT_MONO_MULAW",
      "AL_FORMAT_STEREO_MULAW",
      "AL_FORMAT_QUAD_MULAW",
      "AL_FORMAT_51CHN_MULAW",
      "AL_FORMAT_61CHN_MULAW",
      "AL_FORMAT_71CHN_MULAW",
      NULL
    };
    int i;

    for (i = 0; fmtmulaw[i]; i++) {
      ALenum value = alGetEnumValue (fmtmulaw[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-mulaw", "rate", GST_TYPE_INT_RANGE,
          OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", G_TYPE_INT,
          chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
  } else if (alIsExtensionPresent ("AL_EXT_MULAW")) {
    structure =
        gst_structure_new ("audio/x-mulaw", "rate", GST_TYPE_INT_RANGE,
        OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2,
        NULL);
    gst_caps_append_structure (caps, structure);
  }

  popContext (old, context);

  return caps;
}
Example #26
GstCaps *
gst_xvid_csp_to_caps (gint csp, gint w, gint h)
{
  GstStructure *structure = NULL;

  switch (csp) {
    case XVID_CSP_RGB555:
    case XVID_CSP_RGB565:
    case XVID_CSP_BGR:
    case XVID_CSP_ABGR:
    case XVID_CSP_BGRA:
#ifdef XVID_CSP_ARGB
    case XVID_CSP_ARGB:
#endif
    case XVID_CSP_RGBA:{
      gint r_mask = 0, b_mask = 0, g_mask = 0,
          endianness = 0, bpp = 0, depth = 0;

      switch (csp) {
        case XVID_CSP_RGB555:
          r_mask = GST_VIDEO_RED_MASK_15_INT;
          g_mask = GST_VIDEO_GREEN_MASK_15_INT;
          b_mask = GST_VIDEO_BLUE_MASK_15_INT;
          endianness = G_BYTE_ORDER;
          depth = 15;
          bpp = 16;
          break;
        case XVID_CSP_RGB565:
          r_mask = GST_VIDEO_RED_MASK_16_INT;
          g_mask = GST_VIDEO_GREEN_MASK_16_INT;
          b_mask = GST_VIDEO_BLUE_MASK_16_INT;
          endianness = G_BYTE_ORDER;
          depth = 16;
          bpp = 16;
          break;
        case XVID_CSP_BGR:
          r_mask = 0x0000ff;
          g_mask = 0x00ff00;
          b_mask = 0xff0000;
          endianness = G_BIG_ENDIAN;
          depth = 24;
          bpp = 24;
          break;
        case XVID_CSP_ABGR:
          r_mask = 0x000000ff;
          g_mask = 0x0000ff00;
          b_mask = 0x00ff0000;
          endianness = G_BIG_ENDIAN;
          depth = 24;
          bpp = 32;
          break;
        case XVID_CSP_BGRA:
          r_mask = 0x0000ff00;
          g_mask = 0x00ff0000;
          b_mask = 0xff000000;
          endianness = G_BIG_ENDIAN;
          depth = 24;
          bpp = 32;
          break;
#ifdef XVID_CSP_ARGB
        case XVID_CSP_ARGB:
          r_mask = 0x00ff0000;
          g_mask = 0x0000ff00;
          b_mask = 0x000000ff;
          endianness = G_BIG_ENDIAN;
          depth = 24;
          bpp = 32;
          break;
#endif
        case XVID_CSP_RGBA:
          r_mask = 0xff000000;
          g_mask = 0x00ff0000;
          b_mask = 0x0000ff00;
          endianness = G_BIG_ENDIAN;
          depth = 24;
          bpp = 32;
          break;
      }

      structure = gst_structure_new ("video/x-raw-rgb",
          "width", G_TYPE_INT, w,
          "height", G_TYPE_INT, h,
          "depth", G_TYPE_INT, depth,
          "bpp", G_TYPE_INT, bpp,
          "endianness", G_TYPE_INT, endianness,
          "red_mask", G_TYPE_INT, r_mask,
          "green_mask", G_TYPE_INT, g_mask,
          "blue_mask", G_TYPE_INT, b_mask, NULL);
      break;
    }

    case XVID_CSP_YUY2:
    case XVID_CSP_YVYU:
    case XVID_CSP_UYVY:
    case XVID_CSP_I420:
    case XVID_CSP_YV12:{
      guint32 fourcc = 0;

      switch (csp) {
        case XVID_CSP_YUY2:
          fourcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
          break;
        case XVID_CSP_YVYU:
          fourcc = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
          break;
        case XVID_CSP_UYVY:
          fourcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
          break;
        case XVID_CSP_I420:
          fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
          break;
        case XVID_CSP_YV12:
          fourcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
          break;
      }

      structure = gst_structure_new ("video/x-raw-yuv",
          "width", G_TYPE_INT, w,
          "height", G_TYPE_INT, h, "format", GST_TYPE_FOURCC, fourcc, NULL);
      break;
    }
  }

  return gst_caps_new_full (structure, NULL);
}
Example #27
static GstBuffer *
gst_rtp_dtmf_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  gint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  payload_len = gst_rtp_buffer_get_payload_len (buf);
  payload = gst_rtp_buffer_get_payload (buf);

  if (payload_len != sizeof (GstRTPDTMFPayload))
    goto bad_packet;

  memcpy (&dtmf_payload, payload, sizeof (GstRTPDTMFPayload));

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;


  marker = gst_rtp_buffer_get_marker (buf);

  timestamp = gst_rtp_buffer_get_timestamp (buf);

  dtmf_payload.duration = g_ntohs (dtmf_payload.duration);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_TIMESTAMP (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff  : %d - clock rate : %d - timestamp : %llu",
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* If late or duplicate packet (like the redundant end packet). Ignore */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_buffer_new ();
    gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload, outbuf);


    GST_BUFFER_TIMESTAMP (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload, "timestamp : %llu - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  return outbuf;


bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));
  return NULL;
}
Example #28
static gboolean
gst_glimage_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstGLImageSink *glimage_sink = GST_GLIMAGE_SINK (bsink);
  GstBufferPool *pool;
  GstStructure *config;
  GstCaps *caps;
  guint size;
  gboolean need_pool;
  GstStructure *gl_context;
  gchar *platform, *gl_apis;
  gpointer handle;

  if (!_ensure_gl_setup (glimage_sink))
    return FALSE;

  gst_query_parse_allocation (query, &caps, &need_pool);

  if (caps == NULL)
    goto no_caps;

  if ((pool = glimage_sink->pool))
    gst_object_ref (pool);

  if (pool != NULL) {
    GstCaps *pcaps;

    /* we had a pool, check caps */
    GST_DEBUG_OBJECT (glimage_sink, "check existing pool caps");
    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL);

    if (!gst_caps_is_equal (caps, pcaps)) {
      GST_DEBUG_OBJECT (glimage_sink, "pool has different caps");
      /* different caps, we can't use this pool */
      gst_object_unref (pool);
      pool = NULL;
    }
    gst_structure_free (config);
  }

  if (pool == NULL && need_pool) {
    GstVideoInfo info;

    if (!gst_video_info_from_caps (&info, caps))
      goto invalid_caps;

    GST_DEBUG_OBJECT (glimage_sink, "create new pool");
    pool = gst_gl_buffer_pool_new (glimage_sink->context);

    /* the normal size of a frame */
    size = info.size;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, 0, 0);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }
  /* we need at least 2 buffers because we hold on to the last one */
  gst_query_add_allocation_pool (query, pool, size, 2, 0);

  /* we also support various metadata */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, 0);

  gst_object_unref (pool);

  gl_apis =
      gst_gl_api_to_string (gst_gl_context_get_gl_api (glimage_sink->context));
  platform =
      gst_gl_platform_to_string (gst_gl_context_get_gl_platform
      (glimage_sink->context));
  handle = (gpointer) gst_gl_context_get_gl_context (glimage_sink->context);

  gl_context =
      gst_structure_new ("GstVideoGLTextureUploadMeta", "gst.gl.GstGLContext",
      GST_GL_TYPE_CONTEXT, glimage_sink->context, "gst.gl.context.handle",
      G_TYPE_POINTER, handle, "gst.gl.context.type", G_TYPE_STRING, platform,
      "gst.gl.context.apis", G_TYPE_STRING, gl_apis, NULL);
  gst_query_add_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, gl_context);

  g_free (gl_apis);
  g_free (platform);
  gst_structure_free (gl_context);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
config_failed:
  {
    GST_DEBUG_OBJECT (bsink, "failed setting config");
    return FALSE;
  }
}
Example #29
static GstElement *
create_audio_payloader (GstRTSPCamMediaFactory *factory,
    GstElement *bin, gint payloader_number)
{
  GstElement *encoder;
  GstElement *pay;
  GstElement *audiosrc;
  GstElement *audioconvert;
  GstElement *audiorate;
  GstElement *capsfilter;
  gchar *audio_formats[] = {"audio/x-raw-float",
      "audio/x-raw-int", NULL};
  GstCaps *audio_caps;
  gchar *capss;
  gboolean linked;
  int i;

  encoder = create_encoder (factory, factory->audio_codec);
  if (encoder == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create encoder ");
    return NULL;
  }

  pay = create_payloader (factory, factory->audio_codec, payloader_number, AUDIO_PAYLOAD_TYPE);
  if (pay == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create payloader ");
    gst_object_unref (encoder);
    return NULL;
  }

  audiosrc = gst_element_factory_make(factory->audio_source, NULL);
  if (audiosrc == NULL) {
    GST_WARNING_OBJECT (factory, "couldn't create audio source");
    gst_object_unref (encoder);
    gst_object_unref (pay);

    return NULL;
  }
  else if (!g_strcmp0 (factory->audio_source, "jackaudiosrc"))
      g_object_set(audiosrc, "connect", 2, NULL);

  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  audiorate = gst_element_factory_make ("audiorate", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);

  audio_caps = gst_caps_new_empty ();
  for (i = 0; audio_formats[i] != NULL; i++) {
    GstStructure *structure = gst_structure_new (audio_formats[i], NULL);

    if (factory->audio_channels != -1)
      gst_structure_set (structure, "channels", G_TYPE_INT, factory->audio_channels, NULL);

    gst_caps_append_structure (audio_caps, structure);
  }

  capss = gst_caps_to_string (audio_caps);
  GST_INFO_OBJECT (factory, "setting audio caps %s", capss);
  g_free (capss);

  g_object_set (capsfilter, "caps", audio_caps, NULL);
  gst_caps_unref (audio_caps);

  gst_bin_add_many (GST_BIN (bin), audiosrc, capsfilter, audioconvert, audiorate, encoder, pay, NULL);
  linked = gst_element_link_many (audiosrc, capsfilter, audioconvert, audiorate, encoder, pay, NULL);
  if (!linked) {
      gst_object_unref (bin);
      return NULL;
  }
  return pay;
}
Example #30
static GstElement *
create_video_payloader (GstRTSPCamMediaFactory *factory,
    GstElement *bin, gint payloader_number)
{
  GstElement *encoder;
  GstElement *pay;
  GstElement *videosrc;
  GstElement *queue, *ffmpegcolorspace, *videoscale, *videorate;
  GstElement *capsfilter;
  gchar *image_formats[] = {"video/x-raw-rgb",
      "video/x-raw-yuv", "video/x-raw-gray", NULL};
  GstCaps *video_caps;
  gchar *capss;
  gboolean linked;
  int i;

  encoder = create_encoder (factory, factory->video_codec);
  if (encoder == NULL)
    return NULL;
  if (factory->video_bitrate != -1)
    g_object_set (encoder, "bitrate", factory->video_bitrate, NULL);

  pay = create_payloader (factory, factory->video_codec, payloader_number, VIDEO_PAYLOAD_TYPE);
  if (pay == NULL)
    return NULL;

  videosrc = gst_element_factory_make (factory->video_source, NULL);
  if (!g_strcmp0 (factory->video_source, "videotestsrc"))
      g_object_set (videosrc, "is-live", TRUE, NULL);
  else if (factory->video_device) /* don't set device for testsrc */
    g_object_set (videosrc, "device", factory->video_device, NULL);

  queue = gst_element_factory_make ("queue", NULL);
  ffmpegcolorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  videorate = gst_element_factory_make ("videorate", NULL);
  videoscale = gst_element_factory_make ("videoscale", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);

  video_caps = gst_caps_new_empty ();
  for (i = 0; image_formats[i] != NULL; i++) {
    GstStructure *structure = gst_structure_new (image_formats[i], NULL);

    if (factory->video_width != -1)
      gst_structure_set (structure, "width", G_TYPE_INT, factory->video_width, NULL);

    if (factory->video_height != -1)
      gst_structure_set (structure, "height", G_TYPE_INT, factory->video_height, NULL);

    if (factory->fps_n != 0 && factory->fps_d != 0)
      gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
          factory->fps_n, factory->fps_d, NULL);

    gst_caps_append_structure (video_caps, structure);
  }

  capss = gst_caps_to_string (video_caps);
  GST_INFO_OBJECT (factory, "setting video caps %s", capss);
  g_free (capss);

  g_object_set (capsfilter, "caps", video_caps, NULL);
  gst_caps_unref (video_caps);

  gst_bin_add_many (GST_BIN (bin), videosrc, queue, ffmpegcolorspace, videoscale,
      videorate, capsfilter, encoder, pay, NULL);
  linked = gst_element_link_many (videosrc, queue, videorate, ffmpegcolorspace, videoscale,
      capsfilter, encoder, pay, NULL);
  g_assert(linked);

  return pay;
}