Example #1
static void
gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
{
  /* fixme: give an update_fps_function */
  v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);

  /* number of buffers requested */
  v4l2src->num_buffers = PROP_DEF_QUEUE_SIZE;

  v4l2src->always_copy = PROP_DEF_ALWAYS_COPY;
  v4l2src->decimate = PROP_DEF_DECIMATE;

  v4l2src->is_capturing = FALSE;

  gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);

  v4l2src->fps_d = 0;
  v4l2src->fps_n = 0;
}
Example #2
static void
gst_base_audio_src_init (GstBaseAudioSrc * baseaudiosrc,
    GstBaseAudioSrcClass * g_class)
{
  baseaudiosrc->priv = GST_BASE_AUDIO_SRC_GET_PRIVATE (baseaudiosrc);

  baseaudiosrc->buffer_time = DEFAULT_BUFFER_TIME;
  baseaudiosrc->latency_time = DEFAULT_LATENCY_TIME;
  baseaudiosrc->priv->provide_clock = DEFAULT_PROVIDE_CLOCK;
  baseaudiosrc->priv->slave_method = DEFAULT_SLAVE_METHOD;
  /* reset blocksize; we use latency time to calculate a more useful
   * value based on the negotiated format. */
  GST_BASE_SRC (baseaudiosrc)->blocksize = 0;

  baseaudiosrc->clock = gst_audio_clock_new ("GstAudioSrcClock",
      (GstAudioClockGetTimeFunc) gst_base_audio_src_get_time, baseaudiosrc);

  /* we are always a live source */
  gst_base_src_set_live (GST_BASE_SRC (baseaudiosrc), TRUE);
  /* we operate in time */
  gst_base_src_set_format (GST_BASE_SRC (baseaudiosrc), GST_FORMAT_TIME);
}
Example #3
static void
gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
{
  /* fixme: give an update_fps_function */
  v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);

  /* number of buffers requested */
  v4l2src->num_buffers = GST_V4L2_MIN_BUFFERS;

  v4l2src->always_copy = DEFAULT_PROP_ALWAYS_COPY;

  v4l2src->formats = NULL;

  v4l2src->is_capturing = FALSE;

  gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);

  v4l2src->fps_d = 0;
  v4l2src->fps_n = 0;
}
Example #4
/**
 * shell_recorder_src_add_buffer:
 *
 * Adds a buffer to the internal queue to be pushed out at the next opportunity.
 * There is no flow control, so arbitrary amounts of memory may be used by
 * the buffers on the queue. The buffer contents must match the #GstCaps
 * set in the :caps property.
 */
void
shell_recorder_src_add_buffer (ShellRecorderSrc *src,
			       GstBuffer        *buffer)
{
  g_return_if_fail (SHELL_IS_RECORDER_SRC (src));
  g_return_if_fail (src->caps != NULL);

  gst_base_src_set_caps (GST_BASE_SRC (src), src->caps);
  shell_recorder_src_update_memory_used (src,
					 (int)(gst_buffer_get_size(buffer) / 1024));

  g_async_queue_push (src->queue, gst_buffer_ref (buffer));
}
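As a usage illustration (not part of the original source), a caller could wrap raw frame data into a GstBuffer and hand it to this function. A minimal sketch, assuming the GStreamer 1.x buffer API and a hypothetical push_frame helper; the :caps property must already be set:

#include <gst/gst.h>
/* ShellRecorderSrc and shell_recorder_src_add_buffer() come from the
 * element's own header, included elsewhere. */

static void
push_frame (ShellRecorderSrc *src, gconstpointer data, gsize size)
{
  /* copy the raw frame into a new buffer; contents must match the caps */
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
  gst_buffer_fill (buffer, 0, data, size);

  /* shell_recorder_src_add_buffer() takes its own reference on the buffer */
  shell_recorder_src_add_buffer (src, buffer);
  gst_buffer_unref (buffer);
}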
Example #5
/*
 * Set the value of a property for the client src.
 */
static void
gst_dccp_client_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDCCPClientSrc *src = GST_DCCP_CLIENT_SRC (object);

  switch (prop_id) {
    case PROP_PORT:
      src->port = g_value_get_int (value);
      break;
    case PROP_HOST:
      if (!g_value_get_string (value)) {
        g_warning ("host property cannot be NULL");
        break;
      }
      g_free (src->host);
      src->host = g_strdup (g_value_get_string (value));
      break;
    case PROP_SOCK_FD:
      src->sock_fd = g_value_get_int (value);
      break;
    case PROP_CLOSED:
      src->closed = g_value_get_boolean (value);
      break;
    case PROP_CCID:
      src->ccid = g_value_get_int (value);
      break;
    case PROP_CAPS:
    {
      const GstCaps *new_caps_val = gst_value_get_caps (value);
      GstCaps *new_caps;
      GstCaps *old_caps;

      if (new_caps_val == NULL) {
        new_caps = gst_caps_new_any ();
      } else {
        new_caps = gst_caps_copy (new_caps_val);
      }

      old_caps = src->caps;
      src->caps = new_caps;
      if (old_caps)
        gst_caps_unref (old_caps);
      gst_pad_set_caps (GST_BASE_SRC (src)->srcpad, new_caps);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
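For context, an application would normally drive this setter through the GObject property API. A minimal sketch, assuming the element is registered under the factory name "dccpclientsrc"; the host and port values are illustrative:

#include <gst/gst.h>

/* Sketch: configure the DCCP client source through its properties. */
static GstElement *
make_dccp_client_src (void)
{
  GstElement *src = gst_element_factory_make ("dccpclientsrc", NULL);

  if (src != NULL)
    g_object_set (src,
        "host", "192.168.0.10",  /* handled by the PROP_HOST case above */
        "port", 5001,            /* handled by PROP_PORT */
        NULL);

  return src;
}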
Example #6
static void
gst_dx9screencapsrc_init (GstDX9ScreenCapSrc * src)
{
  /* Set src element initial values... */
  src->surface = NULL;
  src->d3d9_device = NULL;
  src->capture_x = 0;
  src->capture_y = 0;
  src->capture_w = 0;
  src->capture_h = 0;

  src->monitor = 0;
  src->show_cursor = FALSE;
  src->monitor_info.cbSize = sizeof(MONITORINFO);

  gst_base_src_set_format (GST_BASE_SRC (src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (src), TRUE);

  if (!g_d3d9)
    g_d3d9 = Direct3DCreate9 (D3D_SDK_VERSION);
  else
    IDirect3D9_AddRef (g_d3d9);
}
Example #7
static void gst_devsound_src_init(GstDevsoundSrc * devsoundsrc)
    {
    GST_DEBUG_OBJECT(devsoundsrc, "initializing devsoundsrc");
    gst_base_src_set_live(GST_BASE_SRC(devsoundsrc), TRUE);
    //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) devsoundsrc, "gst_devsound_src_init ENTER ",NULL);
    devsoundsrc->device = g_strdup(DEFAULT_DEVICE);
    devsoundsrc->handle=NULL;
    devsoundsrc->preference = 0; //default=>EMdaPriorityPreferenceNone;
    devsoundsrc->priority = 0;   //default=>EMdaPriorityNormal;
    devsoundsrc->firstTimeInit = kUnInitialized;
//    pthread_mutex_init(&create_mutex1, NULL);
//    pthread_cond_init(&create_condition1, NULL);
    //gst_debug_log(devsound_debug, GST_LEVEL_LOG, "", "", 0, (GObject *) devsoundsrc, "gst_devsound_src_init EXIT ",NULL);
    }
Example #8
static void gst_nanomsgsrc_get_property(GObject *object, guint prop_id, GValue *value, GParamSpec *pspec)
{
	GstNanomsgSrc *nanomsgsrc = GST_NANOMSGSRC(object);
	switch (prop_id)
	{
		case PROP_URI:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_string(value, nanomsgsrc->uri);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_TIMEOUT:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_uint64(value, nanomsgsrc->timeout);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_PROTOCOL:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_enum(value, nanomsgsrc->protocol);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_IPV4ONLY:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_boolean(value, nanomsgsrc->ipv4only);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_RCVBUFSIZE:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_int(value, nanomsgsrc->rcvbufsize);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_SUBSCRIPTION_TOPIC:
			LOCK_SRC_MUTEX(nanomsgsrc);
			g_value_set_string(value, nanomsgsrc->subscription_topic);
			UNLOCK_SRC_MUTEX(nanomsgsrc);
			break;

		case PROP_IS_LIVE:
			g_value_set_boolean(value, gst_base_src_is_live(GST_BASE_SRC(object)));
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}
Example #9
static gboolean
gst_avdtp_src_start (GstBaseSrc * bsrc)
{
  GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);

  /* None of this can go into prepare() since we need to set up the
   * connection to figure out what format the device is going to send us.
   */

  if (!gst_avdtp_connection_acquire (&avdtpsrc->conn, FALSE)) {
    GST_ERROR_OBJECT (avdtpsrc, "Failed to acquire connection");
    return FALSE;
  }

  if (!gst_avdtp_connection_get_properties (&avdtpsrc->conn)) {
    GST_ERROR_OBJECT (avdtpsrc, "Failed to get transport properties");
    goto fail;
  }

  if (!gst_avdtp_connection_conf_recv_stream_fd (&avdtpsrc->conn)) {
    GST_ERROR_OBJECT (avdtpsrc, "Failed to configure stream fd");
    goto fail;
  }

  GST_DEBUG_OBJECT (avdtpsrc, "Setting block size to link MTU (%d)",
      avdtpsrc->conn.data.link_mtu);
  gst_base_src_set_blocksize (GST_BASE_SRC (avdtpsrc),
      avdtpsrc->conn.data.link_mtu);

  avdtpsrc->dev_caps = gst_avdtp_connection_get_caps (&avdtpsrc->conn);
  if (!avdtpsrc->dev_caps) {
    GST_ERROR_OBJECT (avdtpsrc, "Failed to get device caps");
    goto fail;
  }

  gst_poll_fd_init (&avdtpsrc->pfd);
  avdtpsrc->pfd.fd = g_io_channel_unix_get_fd (avdtpsrc->conn.stream);

  gst_poll_add_fd (avdtpsrc->poll, &avdtpsrc->pfd);
  gst_poll_fd_ctl_read (avdtpsrc->poll, &avdtpsrc->pfd, TRUE);
  gst_poll_set_flushing (avdtpsrc->poll, FALSE);

  g_atomic_int_set (&avdtpsrc->unlocked, FALSE);

  return TRUE;

fail:
  gst_avdtp_connection_release (&avdtpsrc->conn);
  return FALSE;
}
Example #10
static void
gst_gdiscreencapsrc_init (GstGDIScreenCapSrc * src,
    GstGDIScreenCapSrcClass * klass)
{
  /* Set src element initial values... */
  GstPad *src_pad = GST_BASE_SRC_PAD (src);
  gst_pad_set_fixatecaps_function (src_pad, gst_gdiscreencapsrc_fixate);

  src->frames = 0;
  src->dibMem = NULL;
  src->hBitmap = (HBITMAP) INVALID_HANDLE_VALUE;
  src->memDC = (HDC) INVALID_HANDLE_VALUE;
  src->capture_x = 0;
  src->capture_y = 0;
  src->capture_w = 0;
  src->capture_h = 0;

  src->monitor = 0;
  src->show_cursor = FALSE;

  gst_base_src_set_format (GST_BASE_SRC (src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
}
Example #11
static void
gst_app_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC_CAST (object);
  GstAppSrcPrivate *priv = appsrc->priv;

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_src_set_caps (appsrc, gst_value_get_caps (value));
      break;
    case PROP_SIZE:
      gst_app_src_set_size (appsrc, g_value_get_int64 (value));
      break;
    case PROP_STREAM_TYPE:
      gst_app_src_set_stream_type (appsrc, g_value_get_enum (value));
      break;
    case PROP_MAX_BYTES:
      gst_app_src_set_max_bytes (appsrc, g_value_get_uint64 (value));
      break;
    case PROP_FORMAT:
      priv->format = g_value_get_enum (value);
      break;
    case PROP_BLOCK:
      priv->block = g_value_get_boolean (value);
      break;
    case PROP_IS_LIVE:
      gst_base_src_set_live (GST_BASE_SRC (appsrc),
          g_value_get_boolean (value));
      break;
    case PROP_MIN_LATENCY:
      gst_app_src_set_latencies (appsrc, TRUE, g_value_get_int64 (value),
          FALSE, -1);
      break;
    case PROP_MAX_LATENCY:
      gst_app_src_set_latencies (appsrc, FALSE, -1, TRUE,
          g_value_get_int64 (value));
      break;
    case PROP_EMIT_SIGNALS:
      gst_app_src_set_emit_signals (appsrc, g_value_get_boolean (value));
      break;
    case PROP_MIN_PERCENT:
      priv->min_percent = g_value_get_uint (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
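A typical way these properties are exercised from application code is sketched below; the concrete values are illustrative only:

#include <gst/gst.h>

/* Sketch: configure an appsrc instance via the properties handled above. */
static void
configure_appsrc (GstElement * appsrc)
{
  g_object_set (appsrc,
      "format", GST_FORMAT_TIME,              /* PROP_FORMAT */
      "is-live", TRUE,                        /* PROP_IS_LIVE -> gst_base_src_set_live() */
      "block", TRUE,                          /* PROP_BLOCK */
      "max-bytes", (guint64) 2 * 1024 * 1024, /* PROP_MAX_BYTES -> gst_app_src_set_max_bytes() */
      NULL);
}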
Example #12
static void
gst_rpi_cam_src_init (GstRpiCamSrc * src)
{
  GstColorBalanceChannel *channel;

  gst_base_src_set_format (GST_BASE_SRC (src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
  raspicapture_default_config (&src->capture_config);
  src->capture_config.intraperiod = KEYFRAME_INTERVAL_DEFAULT;
  src->capture_config.verbose = 1;

  g_mutex_init (&src->config_lock);

  /* Don't let basesrc set timestamps, we'll do it using
   * buffer PTS and system times */
  gst_base_src_set_do_timestamp (GST_BASE_SRC (src), FALSE);

  /* Generate the channels list */
  channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
  channel->label = g_strdup ("CONTRAST");
  channel->min_value = -100;
  channel->max_value = 100;
  src->channels = g_list_append (src->channels, channel);

  channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
  channel->label = g_strdup ("BRIGHTNESS");
  channel->min_value = 0;
  channel->max_value = 100;
  src->channels = g_list_append (src->channels, channel);

  channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
  channel->label = g_strdup ("SATURATION");
  channel->min_value = -100;
  channel->max_value = 100;
  src->channels = g_list_append (src->channels, channel);
}
Example #13
static void
gst_rtmp_src_init (GstRTMPSrc * rtmpsrc)
{
#ifdef G_OS_WIN32
  WSADATA wsa_data;

  if (WSAStartup (MAKEWORD (2, 2), &wsa_data) != 0) {
    GST_ERROR_OBJECT (rtmpsrc, "WSAStartup failed: 0x%08x", WSAGetLastError ());
  }
#endif

  rtmpsrc->cur_offset = 0;
  rtmpsrc->last_timestamp = 0;

  gst_base_src_set_format (GST_BASE_SRC (rtmpsrc), GST_FORMAT_TIME);
}
Example #14
/* initialize the new element
 * instantiate pads and add them to element
 * set pad callback functions
 * initialize instance structure
 */
static void gst_espeak_init (GstEspeak * self, GstEspeakClass * gclass) {
    self->text = NULL;
    self->pitch = 0;
    self->rate = 0;
    self->voice = g_strdup (ESPEAK_DEFAULT_VOICE);
    self->voices = espeak_get_voices ();
    self->speak = espeak_new (GST_ELEMENT (self));

    self->caps = gst_caps_new_simple ("audio/x-raw-int",
            "rate", G_TYPE_INT, espeak_get_sample_rate (),
            "channels", G_TYPE_INT, 1,
            "endianness", G_TYPE_INT, G_BYTE_ORDER,
            "width", G_TYPE_INT, 16,
            "depth", G_TYPE_INT, 16, "signed", G_TYPE_BOOLEAN, TRUE, NULL);

    gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_DEFAULT);
}
Example #15
static void
gst_ks_video_src_init (GstKsVideoSrc * self, GstKsVideoSrcClass * gclass)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstBaseSrc *basesrc = GST_BASE_SRC (self);

  gst_base_src_set_live (basesrc, TRUE);
  gst_base_src_set_format (basesrc, GST_FORMAT_TIME);

  gst_ks_video_src_reset (self);

  priv->device_path = DEFAULT_DEVICE_PATH;
  priv->device_name = DEFAULT_DEVICE_NAME;
  priv->device_index = DEFAULT_DEVICE_INDEX;
  priv->do_stats = DEFAULT_DO_STATS;
  priv->enable_quirks = DEFAULT_ENABLE_QUIRKS;
}
Example #16
/* FIXME operating in TIME rather than BYTES could remove this altogether
 * and be more convenient elsewhere */
static gboolean
gst_mms_query (GstBaseSrc * src, GstQuery * query)
{
  GstMMS *mmssrc = GST_MMS (src);
  gboolean res = TRUE;
  GstFormat format;
  gint64 value;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
      gst_query_parse_position (query, &format, &value);
      if (format != GST_FORMAT_BYTES) {
        res = FALSE;
        break;
      }
      value = (gint64) mmsx_get_current_pos (mmssrc->connection);
      gst_query_set_position (query, format, value);
      break;
    case GST_QUERY_DURATION:
      if (!mmsx_get_seekable (mmssrc->connection)) {
        res = FALSE;
        break;
      }
      gst_query_parse_duration (query, &format, &value);
      switch (format) {
        case GST_FORMAT_BYTES:
          value = (gint64) mmsx_get_length (mmssrc->connection);
          gst_query_set_duration (query, format, value);
          break;
        case GST_FORMAT_TIME:
          value = mmsx_get_time_length (mmssrc->connection) * GST_SECOND;
          gst_query_set_duration (query, format, value);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      /* chain to parent */
      res =
          GST_BASE_SRC_CLASS (parent_class)->query (GST_BASE_SRC (src), query);
      break;
  }

  return res;
}
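The handler above answers position queries in bytes and duration queries in bytes or time. A small sketch of how an application might issue those queries, assuming the GStreamer 1.x element query API; print_mms_stats is hypothetical:

#include <gst/gst.h>

/* Sketch: query the MMS source element from application code. */
static void
print_mms_stats (GstElement * mmssrc)
{
  gint64 pos = 0, dur = 0;

  /* served by the GST_QUERY_POSITION branch (GST_FORMAT_BYTES only) */
  if (gst_element_query_position (mmssrc, GST_FORMAT_BYTES, &pos))
    g_print ("position: %" G_GINT64_FORMAT " bytes\n", pos);

  /* served by the GST_QUERY_DURATION branch (BYTES or TIME) */
  if (gst_element_query_duration (mmssrc, GST_FORMAT_TIME, &dur))
    g_print ("duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (dur));
}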
Example #17
static void
gst_dc1394_init (GstDc1394 * src, GstDc1394Class * g_class)
{

    src->segment_start_frame = -1;
    src->segment_end_frame = -1;
    src->timestamp_offset = 0;
    src->caps = gst_dc1394_get_all_dc1394_caps ();
    src->bufsize = 10;
    src->iso_speed = 400;
    src->camnum = 0;
    src->n_frames = 0;

    gst_pad_set_fixatecaps_function (GST_BASE_SRC_PAD (src),
                                     gst_dc1394_src_fixate);

    gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
}
Example #18
/**
 * overrides the default buffer allocation for output port to allow
 * pad_alloc'ing from the srcpad
 */
static GstBuffer *
buffer_alloc (GOmxPort *port, gint len)
{
    GstOmxBaseSrc  *self = port->core->object;
    GstBaseSrc *gst_base = GST_BASE_SRC (self);
    GstBuffer *buf;
    GstFlowReturn ret;

    check_settings (self->out_port, gst_base->srcpad);

    ret = gst_pad_alloc_buffer_and_set_caps (
              gst_base->srcpad, GST_BUFFER_OFFSET_NONE,
              len, GST_PAD_CAPS (gst_base->srcpad), &buf);

    if (ret == GST_FLOW_OK) return buf;

    return NULL;
}
Example #19
static GstStateChangeReturn
gst_openni2_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
  GstOpenni2Src *src = GST_OPENNI2_SRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* Action! */
      if (!openni2_initialise_devices (src))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    return ret;
  }

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_openni2_src_stop (GST_BASE_SRC (src));
      if (src->gst_caps) {
        gst_caps_unref (src->gst_caps);
        src->gst_caps = NULL;
      }
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      src->oni_start_ts = GST_CLOCK_TIME_NONE;
      break;
    default:
      break;
  }

  return ret;
}
Example #20
static void
gst_test_http_src_init (GstTestHTTPSrc * src)
{
  g_mutex_init (&src->mutex);
  src->uri = NULL;
  memset (&src->input, 0, sizeof (src->input));
  src->compress = FALSE;
  src->keep_alive = FALSE;
  src->http_method_name = NULL;
  src->http_method = METHOD_GET;
  src->user_agent = NULL;
  src->position = 0;
  src->segment_end = 0;
  src->http_headers_event = NULL;
  src->duration_changed = FALSE;
  if (gst_test_http_src_blocksize)
    gst_base_src_set_blocksize (GST_BASE_SRC (src),
        gst_test_http_src_blocksize);
}
Example #21
static void
gst_shm_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstShmSrc *self = GST_SHM_SRC (object);

  switch (prop_id) {
    case PROP_SOCKET_PATH:
      GST_OBJECT_LOCK (object);
      g_value_set_string (value, self->socket_path);
      GST_OBJECT_UNLOCK (object);
      break;
    case PROP_IS_LIVE:
      g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (object)));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #22
static void
gst_video_test_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoTestSrc *src = GST_VIDEO_TEST_SRC (object);

  switch (prop_id) {
    case PROP_PATTERN:
      gst_video_test_src_set_pattern (src, g_value_get_enum (value));
      break;
    case PROP_TIMESTAMP_OFFSET:
      src->timestamp_offset = g_value_get_int64 (value);
      break;
    case PROP_IS_LIVE:
      gst_base_src_set_live (GST_BASE_SRC (src), g_value_get_boolean (value));
      break;
    default:
      break;
  }
}
Example #23
static void
gst_rfb_src_init (GstRfbSrc * src)
{
  GstBaseSrc *bsrc = GST_BASE_SRC (src);

  gst_pad_use_fixed_caps (GST_BASE_SRC_PAD (bsrc));
  gst_base_src_set_live (bsrc, TRUE);
  gst_base_src_set_format (bsrc, GST_FORMAT_TIME);

  src->host = g_strdup ("127.0.0.1");
  src->port = 5900;
  src->version_major = 3;
  src->version_minor = 3;

  src->incremental_update = TRUE;

  src->view_only = FALSE;

  src->decoder = rfb_decoder_new ();
}
Example #24
static void
gst_app_src_init (GstAppSrc * appsrc, GstAppSrcClass * klass)
{
  appsrc->priv = G_TYPE_INSTANCE_GET_PRIVATE (appsrc, GST_TYPE_APP_SRC,
      GstAppSrcPrivate);

  appsrc->priv->mutex = g_mutex_new ();
  appsrc->priv->cond = g_cond_new ();
  appsrc->priv->queue = g_queue_new ();

  appsrc->priv->size = DEFAULT_PROP_SIZE;
  appsrc->priv->stream_type = DEFAULT_PROP_STREAM_TYPE;
  appsrc->priv->max_bytes = DEFAULT_PROP_MAX_BYTES;
  appsrc->priv->format = DEFAULT_PROP_FORMAT;
  appsrc->priv->block = DEFAULT_PROP_BLOCK;
  appsrc->priv->min_latency = DEFAULT_PROP_MIN_LATENCY;
  appsrc->priv->max_latency = DEFAULT_PROP_MAX_LATENCY;
  appsrc->priv->emit_signals = DEFAULT_PROP_EMIT_SIGNALS;

  gst_base_src_set_live (GST_BASE_SRC (appsrc), DEFAULT_PROP_IS_LIVE);
}
Example #25
static void
gst_video_test_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVideoTestSrc *src = GST_VIDEO_TEST_SRC (object);

  switch (prop_id) {
    case PROP_PATTERN:
      g_value_set_enum (value, src->pattern_type);
      break;
    case PROP_TIMESTAMP_OFFSET:
      g_value_set_int64 (value, src->timestamp_offset);
      break;
    case PROP_IS_LIVE:
      g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (src)));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #26
static void
gst_hdv1394src_init (GstHDV1394Src * dv1394src)
{
    GstPad *srcpad = GST_BASE_SRC_PAD (dv1394src);

    gst_base_src_set_live (GST_BASE_SRC (dv1394src), TRUE);
    gst_pad_use_fixed_caps (srcpad);

    dv1394src->port = DEFAULT_PORT;
    dv1394src->channel = DEFAULT_CHANNEL;

    dv1394src->use_avc = DEFAULT_USE_AVC;
    dv1394src->guid = DEFAULT_GUID;
    dv1394src->uri = g_strdup_printf ("hdv://%d", dv1394src->port);
    dv1394src->device_name = g_strdup_printf ("Default");

    READ_SOCKET (dv1394src) = -1;
    WRITE_SOCKET (dv1394src) = -1;

    dv1394src->frame_sequence = 0;
}
Example #27
static void
gst_dshowvideosrc_init (GstDshowVideoSrc * src, GstDshowVideoSrcClass * klass)
{
  src->device = NULL;
  src->device_name = NULL;
  src->video_cap_filter = NULL;
  src->dshow_fakesink = NULL;
  src->media_filter = NULL;
  src->filter_graph = NULL;
  src->caps = NULL;
  src->pins_mediatypes = NULL;
  src->is_rgb = FALSE;

  src->buffer_cond = g_cond_new ();
  src->buffer_mutex = g_mutex_new ();
  src->buffer = NULL;
  src->stop_requested = FALSE;

  CoInitializeEx (NULL, COINIT_MULTITHREADED);

  gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
}
Example #28
/* initialize the new element
 * instantiate pads and add them to element
 * set functions
 * initialize structure
 */
static void
gst_dvbsrc_init (GstDvbSrc * object, GstDvbSrcClass * klass)
{
  int i = 0;

  GST_INFO_OBJECT (object, "gst_dvbsrc_init");

  /* We are a live source */
  gst_base_src_set_live (GST_BASE_SRC (object), TRUE);

  object->fd_frontend = -1;
  object->fd_dvr = -1;

  for (i = 0; i < MAX_FILTERS; i++) {
    object->pids[i] = G_MAXUINT16;
    object->fd_filters[i] = -1;
  }
  /* Pid 8192 on DVB gets the whole transport stream */
  object->pids[0] = 8192;

  object->adapter_number = DEFAULT_ADAPTER;
  object->frontend_number = DEFAULT_FRONTEND;
  object->diseqc_src = DEFAULT_DISEQC_SRC;
  object->send_diseqc = (DEFAULT_DISEQC_SRC != -1);
  /* object->pol = DVB_POL_H; *//* set via G_PARAM_CONSTRUCT */
  object->sym_rate = DEFAULT_SYMBOL_RATE;
  object->bandwidth = DEFAULT_BANDWIDTH;
  object->code_rate_hp = DEFAULT_CODE_RATE_HP;
  object->code_rate_lp = DEFAULT_CODE_RATE_LP;
  object->guard_interval = DEFAULT_GUARD;
  object->modulation = DEFAULT_MODULATION;
  object->transmission_mode = DEFAULT_TRANSMISSION_MODE;
  object->hierarchy_information = DEFAULT_HIERARCHY;
  object->inversion = DEFAULT_INVERSION;
  object->stats_interval = DEFAULT_STATS_REPORTING_INTERVAL;

  object->tune_mutex = g_mutex_new ();
}
Example #29
static void
gstbt_audio_synth_calculate_buffer_frames (GstBtAudioSynth * self)
{
  const gdouble ticks_per_minute =
      (gdouble) (self->beats_per_minute * self->ticks_per_beat);
  const gdouble div = 60.0 / self->subticks_per_beat;
  const GstClockTime ticktime =
      (GstClockTime) (0.5 + ((GST_SECOND * 60.0) / ticks_per_minute));

  self->ticktime =
      (GstClockTime) (0.5 + ((GST_SECOND * div) / ticks_per_minute));
  self->samples_per_buffer = ((self->info.rate * div) / ticks_per_minute);
  GST_DEBUG ("samples_per_buffer=%lf", self->samples_per_buffer);
  self->generate_samples_per_buffer = (guint) (0.5 + self->samples_per_buffer);
  gst_base_src_set_blocksize (GST_BASE_SRC (self),
      gstbt_audio_synth_calculate_buffer_size (self));
  // the sequence is quantized to ticks and not subticks
  // we need to compensate for the rounding errors :/
  self->ticktime_err =
      ((gdouble) ticktime -
      (gdouble) (self->subticks_per_beat * self->ticktime)) /
      (gdouble) self->subticks_per_beat;
  GST_DEBUG ("ticktime err=%lf", self->ticktime_err);
}
Example #30
    res &= gst_structure_get_uint (s, "subticks-per-beat", stpb);
  return res;
}

#if 0
// extra value calculated in the app from latency in ms
guint stpb = (glong) ((GST_SECOND * 60) / (bpm * tpb * latency * GST_MSECOND));
stpb = MAX (1, stpb);

// extra values calculated in plugins based on tempo values and samplerate
// stored values: subticktime (as ticktime), samples_per_buffer

gdouble tpm = (gdouble) (bpm * tpb);
gdouble div = 60.0 / stpb;
GstClockTime ticktime = (GstClockTime) (0.5 + ((GST_SECOND * 60.0) / tpm));
GstClockTime subticktime = (GstClockTime) (0.5 + ((GST_SECOND * div) / tpm));

gdouble samples_per_buffer = ((samplerate * div) / tpm);
guint generate_samples_per_buffer = (guint) (0.5 + samples_per_buffer);

// music apps quantize trigger events (notes) to ticks and not subticks
// we need to compensate for the rounding errors
// subticks are used to smooth modulation effects and lower live-latency
gdouble ticktime_err =
    ((gdouble) ticktime - (gdouble) (stpb * ticktime)) / (gdouble) stpb;

// the values are used like this in sources:
gst_base_src_set_blocksize (GST_BASE_SRC (self),
    channels * generate_samples_per_buffer * sizeof (gint16));
#endif
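To make the tempo arithmetic above concrete, here is a worked sketch with assumed values (120 BPM, 4 ticks per beat, 4 subticks per beat, 44100 Hz, one 16-bit channel); the helper is purely illustrative:

#include <glib.h>

/* Worked example of the tempo math above with assumed values:
 * bpm = 120, tpb = 4, stpb = 4, samplerate = 44100, 1 channel, 16-bit. */
static void
tempo_math_example (void)
{
  gdouble tpm = 120 * 4;                               /* 480 ticks per minute */
  gdouble div = 60.0 / 4;                              /* 15.0                 */
  gdouble samples_per_buffer = (44100 * div) / tpm;    /* 1378.125 samples     */
  guint generate = (guint) (0.5 + samples_per_buffer); /* rounds to 1378       */
  guint blocksize = 1 * generate * sizeof (gint16);    /* 2756 bytes           */

  g_print ("samples=%u blocksize=%u\n", generate, blocksize);
}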