Example #1
static GstFlowReturn
gst_inter_app_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterAppSrc *interappsrc = GST_INTER_APP_SRC (src);
  GstBuffer *buffer = NULL;
  GstCaps *caps = NULL;
  gboolean changed;

  GST_DEBUG_OBJECT (interappsrc, "create");

  caps = gst_deferred_client_get_caps (&interappsrc->surface->app_client,
      &changed, TRUE);

  buffer = gst_deferred_client_get_buffer (&interappsrc->surface->app_client);

  if (changed) {
    GST_DEBUG_OBJECT (interappsrc, "Got caps: %" GST_PTR_FORMAT, caps);

    if (!gst_base_src_set_caps (src, caps)) {
      GST_ERROR_OBJECT (interappsrc, "Failed to set caps");
      gst_caps_unref (caps);
      return GST_FLOW_NOT_NEGOTIATED;
    }

    gst_caps_unref (caps);
  }

  GST_LOG_OBJECT (interappsrc, "Pushing %u bytes",
      (unsigned int) gst_buffer_get_size (buffer));

  *buf = buffer;

  return GST_FLOW_OK;
}
Example #2
/**
 * shell_recorder_src_add_buffer:
 *
 * Adds a buffer to the internal queue to be pushed out at the next opportunity.
 * There is no flow control, so arbitrary amounts of memory may be used by
 * the buffers on the queue. The buffer contents must match the #GstCaps
 * set in the :caps property.
 */
void
shell_recorder_src_add_buffer (ShellRecorderSrc *src,
                               GstBuffer        *buffer)
{
    g_return_if_fail (SHELL_IS_RECORDER_SRC (src));
    g_return_if_fail (src->caps != NULL);

    gst_base_src_set_caps (GST_BASE_SRC (src), src->caps);
    shell_recorder_src_update_memory_used (src,
                                           (int)(gst_buffer_get_size(buffer) / 1024));

    g_async_queue_push (src->queue, gst_buffer_ref (buffer));
}
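For context, a minimal caller sketch for the function above (the frame geometry, pixel format, and the push_one_frame helper are illustrative assumptions, not part of gnome-shell): the :caps property has to describe the buffers before the first one is queued, and pacing is left to the caller because the queue has no flow control.
/* Hypothetical caller of shell_recorder_src_add_buffer(); frame size,
 * format and the helper name are made up for illustration. */
#include <gst/gst.h>
#include "shell-recorder-src.h"   /* ShellRecorderSrc; assumed header name */

static void
push_one_frame (ShellRecorderSrc *src, gconstpointer pixels, gsize size)
{
  GstCaps *caps;
  GstBuffer *buffer;

  /* The :caps property must match the buffers we are about to queue. */
  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "BGRx",
      "width", G_TYPE_INT, 1920,
      "height", G_TYPE_INT, 1080,
      "framerate", GST_TYPE_FRACTION, 30, 1, NULL);
  g_object_set (src, "caps", caps, NULL);
  gst_caps_unref (caps);

  /* Copy the raw frame into a buffer; add_buffer() takes its own ref,
   * so we drop ours afterwards. */
  buffer = gst_buffer_new_allocate (NULL, size, NULL);
  gst_buffer_fill (buffer, 0, pixels, size);
  shell_recorder_src_add_buffer (src, buffer);
  gst_buffer_unref (buffer);
}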
Example #3
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_imx_v4l2src_caps_for_current_setup(v4l2src);
	if (!caps)
		return FALSE;

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	return gst_base_src_set_caps(src, caps);
}
Example #4
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	switch (fmt.fmt.pix.pixelformat) {
	case V4L2_PIX_FMT_YUV420: /* Special Case for handling YU12 */
		pixel_format = "I420";
		break;
	case V4L2_PIX_FMT_YUYV: /* Special Case for handling YUYV */
		pixel_format = "YUY2";
		break;
	default:
		gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
		pixel_format = gst_video_format_to_string(gst_fmt);
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED)
		interlace_mode = "interleaved";

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, pixel_format,
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"interlace-mode", G_TYPE_STRING, interlace_mode,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	return gst_base_src_set_caps(src, caps);
}
Example #5
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */

	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, "I420",
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	return gst_base_src_set_caps(src, caps);
}
Example #6
static gboolean
gst_app_src_do_negotiate (GstBaseSrc * basesrc)
{
  GstAppSrc *appsrc = GST_APP_SRC_CAST (basesrc);
  GstAppSrcPrivate *priv = appsrc->priv;
  gboolean result;
  GstCaps *caps;

  GST_OBJECT_LOCK (basesrc);
  caps = priv->caps ? gst_caps_ref (priv->caps) : NULL;
  GST_OBJECT_UNLOCK (basesrc);

  if (caps) {
    result = gst_base_src_set_caps (basesrc, caps);
    gst_caps_unref (caps);
  } else {
    result = GST_BASE_SRC_CLASS (parent_class)->negotiate (basesrc);
  }

  return result;
}
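For comparison, a minimal sketch of how the fixed caps consumed by this negotiate vfunc are usually supplied from application code (the media type, format, rate and channel values, and the configure_appsrc helper, are assumptions for illustration):
/* Hypothetical appsrc setup; caps values are made up. */
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

static void
configure_appsrc (GstElement *appsrc)
{
  GstCaps *caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, "S16LE",
      "layout", G_TYPE_STRING, "interleaved",
      "rate", G_TYPE_INT, 48000,
      "channels", G_TYPE_INT, 2, NULL);

  /* Stored in priv->caps and later picked up by gst_app_src_do_negotiate(). */
  gst_app_src_set_caps (GST_APP_SRC (appsrc), caps);
  gst_caps_unref (caps);
}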
Example #7
static gboolean
gst_v4l2src_negotiate (GstBaseSrc * basesrc)
{
  GstV4l2Src *v4l2src;
  GstV4l2Object *obj;
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  v4l2src = GST_V4L2SRC (basesrc);
  obj = v4l2src->v4l2object;

  /* We don't allow renegotiation, just return TRUE in that case */
  if (GST_V4L2_IS_ACTIVE (obj))
    return TRUE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  LOG_CAPS (basesrc, thiscaps);

  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps without a filter as we'll filter ourselves later on */
  peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  LOG_CAPS (basesrc, peercaps);
  if (peercaps && !gst_caps_is_any (peercaps)) {
    GstCaps *icaps = NULL;
    int i;

    /* Prefer the first caps we are compatible with that the peer proposed */
    for (i = 0; i < gst_caps_get_size (peercaps); i++) {
      /* get intersection */
      GstCaps *ipcaps = gst_caps_copy_nth (peercaps, i);

      GST_DEBUG_OBJECT (basesrc, "peer: %" GST_PTR_FORMAT, ipcaps);
      LOG_CAPS (basesrc, ipcaps);

      icaps = gst_caps_intersect (thiscaps, ipcaps);
      gst_caps_unref (ipcaps);

      if (!gst_caps_is_empty (icaps))
        break;

      gst_caps_unref (icaps);
      icaps = NULL;
    }

    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps);
    LOG_CAPS (basesrc, icaps);
    if (icaps) {
      /* If there are multiple intersections pick the one with the smallest
       * resolution strictly bigger than the first peer caps */
      if (gst_caps_get_size (icaps) > 1) {
        GstStructure *s = gst_caps_get_structure (peercaps, 0);
        int best = 0;
        int twidth, theight;
        int width = G_MAXINT, height = G_MAXINT;

        if (gst_structure_get_int (s, "width", &twidth)
            && gst_structure_get_int (s, "height", &theight)) {

          /* Walk the structure backwards to get the first entry of the
           * smallest resolution bigger than (or equal to) the preferred resolution
           */
          for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) {
            GstStructure *is = gst_caps_get_structure (icaps, i);
            int w, h;

            if (gst_structure_get_int (is, "width", &w)
                && gst_structure_get_int (is, "height", &h)) {
              if (w >= twidth && w <= width && h >= theight && h <= height) {
                width = w;
                height = h;
                best = i;
              }
            }
          }
        }

        caps = gst_caps_copy_nth (icaps, best);
        gst_caps_unref (icaps);
      } else {
        caps = icaps;
      }
    }
    gst_caps_unref (thiscaps);
  } else {
    /* no peer, or the peer has ANY caps; work with our own caps then */
    caps = thiscaps;
  }
  if (peercaps)
    gst_caps_unref (peercaps);
  if (caps) {
    caps = gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      caps = gst_v4l2src_fixate (basesrc, caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
      LOG_CAPS (basesrc, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        result = gst_base_src_set_caps (basesrc, caps);
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
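The negotiation above boils down to intersect, truncate, and fixate before calling gst_base_src_set_caps(); here is a standalone sketch of just that caps arithmetic (both caps strings are invented for illustration):
/* Standalone sketch of the intersect/truncate/fixate steps; caps strings
 * are invented for illustration. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *ours, *peer, *caps;
  gchar *str;

  gst_init (&argc, &argv);

  ours = gst_caps_from_string ("video/x-raw, format=(string)YUY2, "
      "width=(int)[ 16, 1920 ], height=(int)[ 16, 1080 ], "
      "framerate=(fraction)[ 0/1, 60/1 ]");
  peer = gst_caps_from_string ("video/x-raw, width=(int)1280, "
      "height=(int)720, framerate=(fraction)30/1");

  caps = gst_caps_intersect (ours, peer);  /* what both sides support */
  caps = gst_caps_truncate (caps);         /* keep only the first structure */
  caps = gst_caps_fixate (caps);           /* pin any remaining ranges */

  str = gst_caps_to_string (caps);
  g_print ("fixated: %s\n", str);
  g_free (str);

  gst_caps_unref (ours);
  gst_caps_unref (peer);
  gst_caps_unref (caps);
  return 0;
}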
Example #8
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  g_mutex_unlock (&self->lock);
  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);

  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
Example #9
static gboolean
gst_data_uri_src_set_uri (GstURIHandler * handler, const gchar * uri,
    GError ** error)
{
  GstDataURISrc *src = GST_DATA_URI_SRC (handler);
  gboolean ret = FALSE;
  gchar *mimetype = NULL;
  const gchar *parameters_start;
  const gchar *data_start;
  const gchar *orig_uri = uri;
  GstCaps *caps;
  GstBuffer *buffer;
  gboolean base64 = FALSE;
  gchar *charset = NULL;
  gpointer bdata;
  gsize bsize;

  GST_OBJECT_LOCK (src);
  if (GST_STATE (src) >= GST_STATE_PAUSED)
    goto wrong_state;
  GST_OBJECT_UNLOCK (src);

  /* uri must be an URI as defined in RFC 2397
   * data:[<mediatype>][;base64],<data>
   */
  if (strncmp ("data:", uri, 5) != 0)
    goto invalid_uri;

  uri += 5;

  parameters_start = strchr (uri, ';');
  data_start = strchr (uri, ',');
  if (data_start == NULL)
    goto invalid_uri;

  if (data_start != uri && parameters_start != uri)
    mimetype =
        g_strndup (uri,
        (parameters_start ? parameters_start : data_start) - uri);
  else
    mimetype = g_strdup ("text/plain");

  GST_DEBUG_OBJECT (src, "Mimetype: %s", mimetype);

  if (parameters_start != NULL) {
    gchar **walk;
    gchar *parameters =
        g_strndup (parameters_start + 1, data_start - parameters_start - 1);
    gchar **parameters_strv;

    parameters_strv = g_strsplit (parameters, ";", -1);

    GST_DEBUG_OBJECT (src, "Parameters: ");
    walk = parameters_strv;
    while (*walk) {
      GST_DEBUG_OBJECT (src, "\t %s", *walk);
      if (strcmp ("base64", *walk) == 0) {
        base64 = TRUE;
      } else if (strncmp ("charset=", *walk, 8) == 0) {
        charset = g_strdup (*walk + 8);
      }
      walk++;
    }
    g_free (parameters);
    g_strfreev (parameters_strv);
  }

  /* Skip comma */
  data_start += 1;
  if (base64) {
    bdata = g_base64_decode (data_start, &bsize);
  } else {
    /* URI encoded, i.e. "percent" encoding */
    bdata = g_uri_unescape_string (data_start, NULL);
    if (bdata == NULL)
      goto invalid_uri_encoded_data;
    bsize = strlen (bdata) + 1;
  }
  /* Convert to UTF8 */
  if (strcmp ("text/plain", mimetype) == 0 &&
      charset && g_ascii_strcasecmp ("US-ASCII", charset) != 0
      && g_ascii_strcasecmp ("UTF-8", charset) != 0) {
    gsize read;
    gsize written;
    gpointer data;

    data =
        g_convert_with_fallback (bdata, -1, "UTF-8", charset, (char *) "*",
        &read, &written, NULL);
    g_free (bdata);

    bdata = data;
    bsize = written;
  }
  buffer = gst_buffer_new_wrapped (bdata, bsize);

  caps = gst_type_find_helper_for_buffer (GST_OBJECT (src), buffer, NULL);
  if (!caps)
    caps = gst_caps_new_empty_simple (mimetype);
  gst_base_src_set_caps (GST_BASE_SRC_CAST (src), caps);
  gst_caps_unref (caps);

  GST_OBJECT_LOCK (src);
  gst_buffer_replace (&src->buffer, buffer);
  gst_buffer_unref (buffer);
  g_free (src->uri);
  src->uri = g_strdup (orig_uri);
  GST_OBJECT_UNLOCK (src);

  ret = TRUE;

out:

  g_free (mimetype);
  g_free (charset);

  return ret;

wrong_state:
  {
    GST_WARNING_OBJECT (src, "Can't set URI in %s state",
        gst_element_state_get_name (GST_STATE (src)));
    GST_OBJECT_UNLOCK (src);
    g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_STATE,
        "Changing the 'uri' property on dataurisrc while it is running "
        "is not supported");
    goto out;
  }
invalid_uri:
  {
    GST_WARNING_OBJECT (src, "invalid URI '%s'", uri);
    g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
        "Invalid data URI");
    goto out;
  }
invalid_uri_encoded_data:
  {
    GST_WARNING_OBJECT (src, "Failed to parse data encoded in URI '%s'", uri);
    g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
        "Could not parse data encoded in data URI");
    goto out;
  }
}
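A minimal usage sketch for the URI handler above (the element is created standalone; the base64 payload, which decodes to the ASCII string "hello", is made up, and error handling beyond the factory check is omitted):
/* Hypothetical dataurisrc usage; the payload is base64 for "hello". */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *src;

  gst_init (&argc, &argv);

  src = gst_element_factory_make ("dataurisrc", NULL);
  if (src == NULL)
    return 1;

  /* RFC 2397 form: data:[<mediatype>][;base64],<data>.  The element
   * type-finds the decoded payload and falls back to the declared
   * mimetype for the caps it sets. */
  g_object_set (src, "uri", "data:text/plain;base64,aGVsbG8=", NULL);

  gst_object_unref (src);
  return 0;
}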
Example #10
static gboolean
gst_rfb_src_negotiate (GstBaseSrc * bsrc)
{
  GstRfbSrc *src = GST_RFB_SRC (bsrc);
  RfbDecoder *decoder;
  GstCaps *caps;
  GstVideoInfo vinfo;
  GstVideoFormat vformat;
  guint32 red_mask, green_mask, blue_mask;
  gchar *stream_id = NULL;
  GstEvent *stream_start = NULL;

  decoder = src->decoder;

  if (decoder->inited)
    return TRUE;

  GST_DEBUG_OBJECT (src, "connecting to host %s on port %d",
      src->host, src->port);
  if (!rfb_decoder_connect_tcp (decoder, src->host, src->port)) {
    if (decoder->error != NULL) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ,
          ("Could not connect to VNC server %s on port %d: %s", src->host,
              src->port, decoder->error->message), (NULL));
    } else {
      GST_ELEMENT_ERROR (src, RESOURCE, READ,
          ("Could not connect to VNC server %s on port %d", src->host,
              src->port), (NULL));
    }
    return FALSE;
  }

  while (!decoder->inited) {
    if (!rfb_decoder_iterate (decoder)) {
      if (decoder->error != NULL) {
        GST_ELEMENT_ERROR (src, RESOURCE, READ,
            ("Failed to setup VNC connection to host %s on port %d: %s",
                src->host, src->port, decoder->error->message), (NULL));
      } else {
        GST_ELEMENT_ERROR (src, RESOURCE, READ,
            ("Failed to setup VNC connection to host %s on port %d", src->host,
                src->port), (NULL));
      }
      return FALSE;
    }
  }

  stream_id = gst_pad_create_stream_id_printf (GST_BASE_SRC_PAD (bsrc),
      GST_ELEMENT (src), "%s:%d", src->host, src->port);
  stream_start = gst_event_new_stream_start (stream_id);
  g_free (stream_id);
  gst_pad_push_event (GST_BASE_SRC_PAD (bsrc), stream_start);

  decoder->rect_width =
      (decoder->rect_width ? decoder->rect_width : decoder->width);
  decoder->rect_height =
      (decoder->rect_height ? decoder->rect_height : decoder->height);

  decoder->decoder_private = src;

  /* calculate some frequently used values */
  decoder->bytespp = decoder->bpp / 8;
  decoder->line_size = decoder->rect_width * decoder->bytespp;

  GST_DEBUG_OBJECT (src, "setting caps width to %d and height to %d",
      decoder->rect_width, decoder->rect_height);

  red_mask = decoder->red_max << decoder->red_shift;
  green_mask = decoder->green_max << decoder->green_shift;
  blue_mask = decoder->blue_max << decoder->blue_shift;

  vformat = gst_video_format_from_masks (decoder->depth, decoder->bpp,
      decoder->big_endian ? G_BIG_ENDIAN : G_LITTLE_ENDIAN,
      red_mask, green_mask, blue_mask, 0);

  gst_video_info_init (&vinfo);

  gst_video_info_set_format (&vinfo, vformat, decoder->rect_width,
      decoder->rect_height);

  decoder->frame = g_malloc (vinfo.size);
  if (decoder->use_copyrect)
    decoder->prev_frame = g_malloc (vinfo.size);

  caps = gst_video_info_to_caps (&vinfo);

  gst_base_src_set_caps (bsrc, caps);

  gst_caps_unref (caps);

  return TRUE;
}
Example #11
/* This is essentially gst_base_src_negotiate_default() but the caps
 * are guaranteed to have a channel layout for > 2 channels
 */
static gboolean
gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
{
  GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (basesrc);
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps) {
    /* get intersection */
    caps = gst_caps_intersect (thiscaps, peercaps);
    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    /* take first (and best, since they are sorted) possibility */
    caps = gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      caps = GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        result = gst_pulsesrc_create_stream (pulsesrc, &caps);
        if (result)
          result = gst_base_src_set_caps (basesrc, caps);
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
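The comment at the top of this example notes the only difference from gst_base_src_negotiate_default(): the caps are guaranteed to carry a channel layout for more than two channels. A minimal sketch of building such caps with GstAudioInfo (the sample format, rate and channel count are arbitrary, and make_layout_caps is an illustrative helper, not pulsesrc code):
/* Sketch: raw-audio caps that carry an explicit channel layout when the
 * channel count needs one; values are arbitrary.  Needs gstreamer-audio. */
#include <gst/audio/audio.h>

static GstCaps *
make_layout_caps (gint rate, gint channels)
{
  GstAudioInfo info;

  gst_audio_info_init (&info);
  /* NULL positions select the default layout for the channel count, so
   * gst_audio_info_to_caps() emits an explicit channel-mask once there
   * are more than two channels. */
  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, rate, channels,
      NULL);

  return gst_audio_info_to_caps (&info);
}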
Example #12
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  g_mutex_lock (&self->lock);
  if (self->mode == GST_DECKLINK_MODE_AUTO && self->caps_mode != f->mode) {
    GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
        f->mode);
    self->caps_mode = f->mode;
    g_mutex_unlock (&self->lock);
    caps = gst_decklink_mode_get_caps (f->mode);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  } else {
    g_mutex_unlock (&self->lock);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
Example #13
static GstFlowReturn
gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstCaps *caps;
  GstBuffer *buffer;
  guint n, bpf;
  guint64 period_time;
  guint64 period_samples;

  GST_DEBUG_OBJECT (interaudiosrc, "create");

  buffer = NULL;
  caps = NULL;

  g_mutex_lock (&interaudiosrc->surface->mutex);
  if (interaudiosrc->surface->audio_info.finfo) {
    if (!gst_audio_info_is_equal (&interaudiosrc->surface->audio_info,
            &interaudiosrc->info)) {
      caps = gst_audio_info_to_caps (&interaudiosrc->surface->audio_info);
      interaudiosrc->timestamp_offset +=
          gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
          interaudiosrc->info.rate);
      interaudiosrc->n_samples = 0;
    }
  }

  bpf = interaudiosrc->surface->audio_info.bpf;
  period_time = interaudiosrc->surface->audio_period_time;
  period_samples =
      gst_util_uint64_scale (period_time, interaudiosrc->info.rate, GST_SECOND);

  if (bpf > 0)
    n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / bpf;
  else
    n = 0;

  if (n > period_samples)
    n = period_samples;
  if (n > 0) {
    buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter,
        n * bpf);
  } else {
    buffer = gst_buffer_new ();
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_GAP);
  }
  g_mutex_unlock (&interaudiosrc->surface->mutex);

  if (caps) {
    gboolean ret = gst_base_src_set_caps (src, caps);
    gst_caps_unref (caps);
    if (!ret) {
      GST_ERROR_OBJECT (src, "Failed to set caps %" GST_PTR_FORMAT, caps);
      if (buffer)
        gst_buffer_unref (buffer);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  buffer = gst_buffer_make_writable (buffer);

  bpf = interaudiosrc->info.bpf;
  if (n < period_samples) {
    GstMapInfo map;
    GstMemory *mem;

    GST_DEBUG_OBJECT (interaudiosrc,
        "creating %" G_GUINT64_FORMAT " samples of silence",
        period_samples - n);
    mem = gst_allocator_alloc (NULL, (period_samples - n) * bpf, NULL);
    if (gst_memory_map (mem, &map, GST_MAP_WRITE)) {
      gst_audio_format_fill_silence (interaudiosrc->info.finfo, map.data,
          map.size);
      gst_memory_unmap (mem, &map);
    }
    gst_buffer_prepend_memory (buffer, mem);
  }
  n = period_samples;

  GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples;
  GST_BUFFER_OFFSET_END (buffer) = interaudiosrc->n_samples + n;
  GST_BUFFER_TIMESTAMP (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
      interaudiosrc->info.rate);
  GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples + n, GST_SECOND,
      interaudiosrc->info.rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (interaudiosrc->n_samples == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  interaudiosrc->n_samples += n;

  *buf = buffer;

  return GST_FLOW_OK;
}
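The timestamp and duration above are derived purely from sample counts scaled by the sample rate; a tiny standalone check of that arithmetic (the sample count and rate are arbitrary):
/* 480 samples at 48 kHz should map to exactly 10 ms. */
#include <gst/gst.h>

int
main (void)
{
  guint64 n_samples = 480;
  gint rate = 48000;
  GstClockTime ts, next_ts;

  ts = gst_util_uint64_scale (n_samples, GST_SECOND, rate);
  next_ts = gst_util_uint64_scale (n_samples + 480, GST_SECOND, rate);

  g_print ("ts = %" GST_TIME_FORMAT ", duration = %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (ts), GST_TIME_ARGS (next_ts - ts));
  return 0;
}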
Example #14
/* create a socket for connecting to remote server */
static gboolean
gst_neonhttp_src_start (GstBaseSrc * bsrc)
{
    GstNeonhttpSrc *src = GST_NEONHTTP_SRC (bsrc);
    const gchar *content_length;
    gint res;

#ifndef GST_DISABLE_GST_DEBUG
    if (src->neon_http_debug)
        ne_debug_init (stderr, NE_DBG_HTTP);
#endif

    ne_oom_callback (oom_callback);

    res = ne_sock_init ();
    if (res != 0)
        goto init_failed;

    res = gst_neonhttp_src_send_request_and_redirect (src,
            &src->session, &src->request, 0, src->automatic_redirect);

    if (res != NE_OK || !src->session) {
        if (res == HTTP_SOCKET_ERROR) {
            goto socket_error;
        } else if (res == HTTP_REQUEST_WRONG_PROXY) {
            goto wrong_proxy;
        } else {
            goto begin_req_failed;
        }
    }

    content_length = ne_get_response_header (src->request, "Content-Length");

    if (content_length)
        src->content_size = g_ascii_strtoull (content_length, NULL, 10);
    else
        src->content_size = -1;

    if (TRUE) {
        /* Icecast stuff */
        const gchar *str_value;
        GstTagList *tags;
        gchar *iradio_name;
        gchar *iradio_url;
        gchar *iradio_genre;
        gint icy_metaint;

        tags = gst_tag_list_new_empty ();

        str_value = ne_get_response_header (src->request, "icy-metaint");
        if (str_value) {
            if (sscanf (str_value, "%d", &icy_metaint) == 1) {
                GstCaps *icy_caps;

                icy_caps = gst_caps_new_simple ("application/x-icy",
                                                "metadata-interval", G_TYPE_INT, icy_metaint, NULL);
                gst_base_src_set_caps (GST_BASE_SRC (src), icy_caps);
            }
        }

        /* FIXME: send tags with name, genre, url */
        str_value = ne_get_response_header (src->request, "icy-name");
        if (str_value) {
            iradio_name = gst_neonhttp_src_unicodify (str_value);
            if (iradio_name) {
                gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ORGANIZATION,
                                  iradio_name, NULL);
                g_free (iradio_name);
            }
        }
        str_value = ne_get_response_header (src->request, "icy-genre");
        if (str_value) {
            iradio_genre = gst_neonhttp_src_unicodify (str_value);
            if (iradio_genre) {
                gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_GENRE,
                                  iradio_genre, NULL);
                g_free (iradio_genre);
            }
        }
        str_value = ne_get_response_header (src->request, "icy-url");
        if (str_value) {
            iradio_url = gst_neonhttp_src_unicodify (str_value);
            if (iradio_url) {
                gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_LOCATION,
                                  iradio_url, NULL);
                g_free (iradio_url);
            }
        }
        if (!gst_tag_list_is_empty (tags)) {
            GST_DEBUG_OBJECT (src, "pushing tag list %" GST_PTR_FORMAT, tags);
            gst_pad_push_event (GST_BASE_SRC_PAD (src), gst_event_new_tag (tags));
        } else {
            gst_tag_list_unref (tags);
        }
    }

    return TRUE;

    /* ERRORS */
init_failed:
    {
        GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
                           ("ne_sock_init() failed: %d", res));
        return FALSE;
    }
socket_error:
    {
        GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
                           ("HTTP Request failed when opening socket: %d", res));
        return FALSE;
    }
wrong_proxy:
    {
        GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
                           ("Proxy Server URI is invalid - make sure that either both proxy host "
                            "and port are specified or neither."));
        return FALSE;
    }
begin_req_failed:
    {
        GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
                           ("Could not begin request: %d", res));
        return FALSE;
    }
}