static GdkPixbuf *
gst_gdk_pixbuf_sink_get_pixbuf_from_buffer (GstGdkPixbufSink * sink,
    GstBuffer * buf)
{
  GdkPixbuf *pix = NULL;
  GstVideoFrame *frame;
  gint minsize, bytes_per_pixel;

  g_return_val_if_fail (GST_VIDEO_SINK_WIDTH (sink) > 0, NULL);
  g_return_val_if_fail (GST_VIDEO_SINK_HEIGHT (sink) > 0, NULL);

  frame = g_slice_new0 (GstVideoFrame);
  gst_video_frame_map (frame, &sink->info, buf, GST_MAP_READ);

  bytes_per_pixel = (sink->has_alpha) ? 4 : 3;

  /* last row needn't have row padding */
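  /* illustration with hypothetical numbers: a 100x100 RGB frame whose rows
   * are padded to a 320-byte stride only needs 320 * 99 + 3 * 100 = 31980
   * bytes, since the 20 padding bytes after the final row may be missing */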
  minsize = (GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) *
      (GST_VIDEO_SINK_HEIGHT (sink) - 1)) +
      (bytes_per_pixel * GST_VIDEO_SINK_WIDTH (sink));

  g_return_val_if_fail (gst_buffer_get_size (buf) >= minsize, NULL);

  gst_buffer_ref (buf);
  pix = gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (frame, 0),
      GDK_COLORSPACE_RGB, sink->has_alpha, 8, GST_VIDEO_SINK_WIDTH (sink),
      GST_VIDEO_SINK_HEIGHT (sink), GST_VIDEO_FRAME_COMP_STRIDE (frame, 0),
      (GdkPixbufDestroyNotify) gst_gdk_pixbuf_sink_pixbuf_destroy_notify,
      frame);

  return pix;
}
static GdkPixbuf *
gst_gdk_pixbuf_sink_get_pixbuf_from_buffer (GstGdkPixbufSink * sink,
    GstBuffer * buf)
{
  GdkPixbuf *pix = NULL;
  gint minsize, bytes_per_pixel;

  g_return_val_if_fail (GST_VIDEO_SINK_WIDTH (sink) > 0, NULL);
  g_return_val_if_fail (GST_VIDEO_SINK_HEIGHT (sink) > 0, NULL);

  bytes_per_pixel = (sink->has_alpha) ? 4 : 3;

  /* last row needn't have row padding */
  minsize = (sink->rowstride * (GST_VIDEO_SINK_HEIGHT (sink) - 1)) +
      (bytes_per_pixel * GST_VIDEO_SINK_WIDTH (sink));

  g_return_val_if_fail (GST_BUFFER_SIZE (buf) >= minsize, NULL);

  pix = gdk_pixbuf_new_from_data (GST_BUFFER_DATA (buf),
      GDK_COLORSPACE_RGB, sink->has_alpha, 8, GST_VIDEO_SINK_WIDTH (sink),
      GST_VIDEO_SINK_HEIGHT (sink), sink->rowstride,
      (GdkPixbufDestroyNotify) gst_gdk_pixbuf_sink_pixbuf_destroy_notify,
      gst_buffer_ref (buf));

  return pix;
}
Example #3
static gboolean
gst_kms_sink_calculate_display_ratio (GstKMSSink * self, GstVideoInfo * vinfo)
{
  guint dar_n, dar_d;
  guint video_width, video_height;
  guint video_par_n, video_par_d;
  guint dpy_par_n, dpy_par_d;

  video_width = GST_VIDEO_INFO_WIDTH (vinfo);
  video_height = GST_VIDEO_INFO_HEIGHT (vinfo);
  video_par_n = GST_VIDEO_INFO_PAR_N (vinfo);
  video_par_d = GST_VIDEO_INFO_PAR_D (vinfo);

  gst_video_calculate_device_ratio (self->hdisplay, self->vdisplay,
      self->mm_width, self->mm_height, &dpy_par_n, &dpy_par_d);

  if (!gst_video_calculate_display_ratio (&dar_n, &dar_d, video_width,
          video_height, video_par_n, video_par_d, dpy_par_n, dpy_par_d))
    return FALSE;

  GST_DEBUG_OBJECT (self, "video calculated display ratio: %d/%d", dar_n,
      dar_d);

  /* now find a width x height that respects this display ratio.
   * prefer those that have one of w/h the same as the incoming video
   * using wd / hd = dar_n / dar_d */

  /* start with same height, because of interlaced video */
  /* check hd / dar_d is an integer scale factor, and scale wd with the PAR */
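  /* illustration with hypothetical numbers: 720x576 video with a 16:15
   * pixel-aspect-ratio on a square-pixel display gives a 4/3 display ratio;
   * 576 % 3 == 0, so the height is kept and the width becomes
   * 576 * 4 / 3 = 768 */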
  if (video_height % dar_d == 0) {
    GST_DEBUG_OBJECT (self, "keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  } else if (video_width % dar_n == 0) {
    GST_DEBUG_OBJECT (self, "keeping video width");
    GST_VIDEO_SINK_WIDTH (self) = video_width;
    GST_VIDEO_SINK_HEIGHT (self) = (guint)
        gst_util_uint64_scale_int (video_width, dar_d, dar_n);
  } else {
    GST_DEBUG_OBJECT (self, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  }
  GST_DEBUG_OBJECT (self, "scaling to %dx%d", GST_VIDEO_SINK_WIDTH (self),
      GST_VIDEO_SINK_HEIGHT (self));

  return TRUE;
}
Example #4
static void
gst_sdlvideosink_navigation_send_event (GstNavigation * navigation,
                                        GstStructure * structure)
{
    GstSDLVideoSink *sdlvideosink = GST_SDLVIDEOSINK (navigation);
    GstEvent *event;
    GstVideoRectangle dst = { 0, };
    GstVideoRectangle src = { 0, };
    GstVideoRectangle result;
    double x, y, old_x, old_y;
    GstPad *pad = NULL;

    src.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
    src.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);
    dst.w = sdlvideosink->width;
    dst.h = sdlvideosink->height;
    gst_video_sink_center_rect (src, dst, &result, FALSE);

    event = gst_event_new_navigation (structure);

    /* Our coordinates can be wrong here if we centered the video */

    /* Converting pointer coordinates to the non scaled geometry */
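    /* illustration with hypothetical numbers: if result.x = 80,
     * result.w = 640 and sdlvideosink->width = 800, a pointer at x = 400
     * is translated to (400 - 80) * 800 / 640 = 400 */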
    if (gst_structure_get_double (structure, "pointer_x", &old_x)) {
        x = old_x;

        if (x >= result.x && x <= (result.x + result.w)) {
            x -= result.x;
            x *= sdlvideosink->width;
            x /= result.w;
        } else {
            x = 0;
        }
        GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event x "
                          "coordinate from %f to %f", old_x, x);
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL);
    }
    if (gst_structure_get_double (structure, "pointer_y", &old_y)) {
        y = old_y;

        if (y >= result.y && y <= (result.y + result.h)) {
            y -= result.y;
            y *= sdlvideosink->height;
            y /= result.h;
        } else {
            y = 0;
        }
        GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event y "
                          "coordinate from %f to %f", old_y, y);
        gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL);
    }

    pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (sdlvideosink));

    if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) {
        gst_pad_send_event (pad, event);

        gst_object_unref (pad);
    }
}
static gboolean
gst_glimage_sink_redisplay (GstGLImageSink * gl_sink)
{
  GstGLWindow *window;
  gboolean alive;

  window = gst_gl_context_get_window (gl_sink->context);

  if (window && gst_gl_window_is_running (window)) {

#if GST_GL_HAVE_GLES2
    if (USING_GLES2 (gl_sink->context)) {
      if (!gl_sink->redisplay_shader) {
        gst_gl_window_send_message (window,
            GST_GL_WINDOW_CB (gst_glimage_sink_thread_init_redisplay), gl_sink);
      }
    }
#endif

    /* Drawing is asynchronous: gst_gl_window_draw is not blocking
     * It means that it does not wait for stuff to be executed in other threads
     */
    gst_gl_window_draw (window, GST_VIDEO_SINK_WIDTH (gl_sink),
        GST_VIDEO_SINK_HEIGHT (gl_sink));
  }
  alive = gst_gl_window_is_running (window);
  gst_object_unref (window);

  return alive;
}
Example #6
static gboolean
gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
  GstV4l2Object *obj = v4l2sink->v4l2object;

  LOG_CAPS (v4l2sink, caps);

  if (!GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {
    GST_DEBUG_OBJECT (v4l2sink, "device is not open");
    return FALSE;
  }

  /* make sure the caps changed before doing anything */
  if (gst_v4l2_object_caps_equal (obj, caps))
    return TRUE;

  if (!gst_v4l2_object_stop (obj))
    goto stop_failed;

  if (!gst_v4l2_object_set_format (v4l2sink->v4l2object, caps))
    goto invalid_format;

  gst_v4l2sink_sync_overlay_fields (v4l2sink);
  gst_v4l2sink_sync_crop_fields (v4l2sink);

#ifdef HAVE_XVIDEO
  gst_v4l2_video_overlay_prepare_window_handle (v4l2sink->v4l2object, TRUE);
#endif

  GST_INFO_OBJECT (v4l2sink, "outputting buffers via mmap()");

  v4l2sink->video_width = GST_V4L2_WIDTH (v4l2sink->v4l2object);
  v4l2sink->video_height = GST_V4L2_HEIGHT (v4l2sink->v4l2object);

  /* TODO: videosink width/height should be scaled according to
   * pixel-aspect-ratio
   */
  GST_VIDEO_SINK_WIDTH (v4l2sink) = v4l2sink->video_width;
  GST_VIDEO_SINK_HEIGHT (v4l2sink) = v4l2sink->video_height;

  return TRUE;

  /* ERRORS */
stop_failed:
  {
    GST_DEBUG_OBJECT (v4l2sink, "failed to stop streaming");
    return FALSE;
  }
invalid_format:
  {
    /* error already posted */
    GST_DEBUG_OBJECT (v4l2sink, "can't set format");
    return FALSE;
  }
}
static void
gst_glimage_sink_navigation_send_event (GstNavigation * navigation, GstStructure
    * structure)
{
  GstGLImageSink *sink = GST_GLIMAGE_SINK (navigation);
  GstEvent *event = NULL;
  GstPad *pad = NULL;
  GstGLWindow *window = gst_gl_context_get_window (sink->context);
  guint width, height;
  gdouble x, y, xscale, yscale;

  g_return_if_fail (GST_GL_IS_WINDOW (window));

  width = GST_VIDEO_SINK_WIDTH (sink);
  height = GST_VIDEO_SINK_HEIGHT (sink);
  gst_gl_window_get_surface_dimensions (window, &width, &height);

  event = gst_event_new_navigation (structure);

  pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (sink));
  /* Converting pointer coordinates to the non scaled geometry */
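  /* illustration with hypothetical numbers: a 1280-pixel wide sink drawn on
   * a 640-pixel wide surface gives xscale = 1280 / 640 = 2.0, so pointer_x
   * is doubled back into stream coordinates */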
  if (width != GST_VIDEO_SINK_WIDTH (sink) &&
      width != 0 && gst_structure_get_double (structure, "pointer_x", &x)) {
    xscale = (gdouble) GST_VIDEO_SINK_WIDTH (sink) / width;
    gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE,
        (gdouble) x * xscale, NULL);
  }
  if (height != GST_VIDEO_SINK_HEIGHT (sink) &&
      height != 0 && gst_structure_get_double (structure, "pointer_y", &y)) {
    yscale = (gdouble) GST_VIDEO_SINK_HEIGHT (sink) / height;
    gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE,
        (gdouble) y * yscale, NULL);
  }


  if (GST_IS_PAD (pad) && GST_IS_EVENT (event))
    gst_pad_send_event (pad, event);

  gst_object_unref (pad);
  gst_object_unref (window);
}
static GstFlowReturn
gst_glimage_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;
  GstBuffer *stored_buffer;

  GST_TRACE ("rendering buffer:%p", buf);

  glimage_sink = GST_GLIMAGE_SINK (vsink);

  GST_TRACE ("redisplay texture:%u of size:%ux%u, window size:%ux%u",
      glimage_sink->next_tex, GST_VIDEO_INFO_WIDTH (&glimage_sink->info),
      GST_VIDEO_INFO_HEIGHT (&glimage_sink->info),
      GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  /* Avoid releasing the texture while drawing */
  GST_GLIMAGE_SINK_LOCK (glimage_sink);
  glimage_sink->redisplay_texture = glimage_sink->next_tex;
  stored_buffer = glimage_sink->stored_buffer;
  glimage_sink->stored_buffer = gst_buffer_ref (buf);
  GST_GLIMAGE_SINK_UNLOCK (glimage_sink);
  if (stored_buffer)
    gst_buffer_unref (stored_buffer);

  /* Ask the underlying window to redraw its content */
  if (!gst_glimage_sink_redisplay (glimage_sink))
    goto redisplay_failed;

  GST_TRACE ("post redisplay");

  if (g_atomic_int_get (&glimage_sink->to_quit) != 0) {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    gst_gl_upload_release_buffer (glimage_sink->upload);
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;

/* ERRORS */
redisplay_failed:
  {
    gst_gl_upload_release_buffer (glimage_sink->upload);
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    return GST_FLOW_ERROR;
  }
}
Example #9
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  }
  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->format = format;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
static GstStateChangeReturn
gst_pvrvideosink_change_state (GstElement * element, GstStateChange transition)
{
  GstPVRVideoSink *pvrvideosink;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstDrawContext *dcontext;

  pvrvideosink = GST_PVRVIDEOSINK (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      if (pvrvideosink->dcontext == NULL) {
        dcontext = gst_pvrvideosink_get_dcontext (pvrvideosink);
        if (dcontext == NULL)
          return GST_STATE_CHANGE_FAILURE;
        GST_OBJECT_LOCK (pvrvideosink);
        pvrvideosink->dcontext = dcontext;
        GST_OBJECT_UNLOCK (pvrvideosink);
      }
      gst_pvrvideosink_manage_event_thread (pvrvideosink);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      GST_VIDEO_SINK_WIDTH (pvrvideosink) = 0;
      GST_VIDEO_SINK_HEIGHT (pvrvideosink) = 0;
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_pvrvideosink_reset (pvrvideosink);
      break;
    default:
      break;
  }

  return ret;
}
Example #11
static void
gst_vdp_sink_navigation_send_event (GstNavigation * navigation,
    GstStructure * structure)
{
  VdpSink *vdp_sink = GST_VDP_SINK (navigation);
  GstEvent *event;
  gint x_offset, y_offset;
  gdouble x, y;
  GstPad *pad = NULL;

  event = gst_event_new_navigation (structure);

  /* We are not converting the pointer coordinates as there's no hardware
     scaling done here. The only possible scaling is done by videoscale and
     videoscale will have to catch those events and transform the coordinates
     to match the applied scaling. So here we just add the offset if the image
     is centered in the window.  */
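  /* illustration with hypothetical numbers: an 800x600 window showing a
   * 640x480 stream gives offsets of 160 and 120, so pointer coordinates are
   * shifted by 80 and 60 to account for the centering */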

  /* We take the flow_lock while we look at the window */
  g_mutex_lock (vdp_sink->flow_lock);

  if (!vdp_sink->window) {
    g_mutex_unlock (vdp_sink->flow_lock);
    return;
  }

  x_offset = vdp_sink->window->width - GST_VIDEO_SINK_WIDTH (vdp_sink);
  y_offset = vdp_sink->window->height - GST_VIDEO_SINK_HEIGHT (vdp_sink);

  g_mutex_unlock (vdp_sink->flow_lock);

  if (x_offset > 0 && gst_structure_get_double (structure, "pointer_x", &x)) {
    x -= x_offset / 2;
    gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL);
  }
  if (y_offset > 0 && gst_structure_get_double (structure, "pointer_y", &y)) {
    y -= y_offset / 2;
    gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL);
  }

  pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (vdp_sink));

  if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) {
    gst_pad_send_event (pad, event);

    gst_object_unref (pad);
  }
}
Example #12
static void
gst_glimage_sink_on_resize (GstGLImageSink * gl_sink, gint width, gint height)
{
  /* Here gl_sink members (e.g. gl_sink->info) have a lifetime tied to set_caps.
   * It means that they cannot change between two set_caps calls
   */
  const GstGLFuncs *gl = gl_sink->context->gl_vtable;
  gboolean do_reshape;

  GST_TRACE ("GL Window resized to %ux%u", width, height);

  /* check if a client reshape callback is registered */
  g_signal_emit (gl_sink, gst_glimage_sink_signals[CLIENT_RESHAPE_SIGNAL], 0,
      gl_sink->context, width, height, &do_reshape);

  /* default reshape */
  if (!do_reshape) {
    if (gl_sink->keep_aspect_ratio) {
      GstVideoRectangle src, dst, result;

      src.x = 0;
      src.y = 0;
      src.w = GST_VIDEO_SINK_WIDTH (gl_sink);
      src.h = GST_VIDEO_SINK_HEIGHT (gl_sink);

      dst.x = 0;
      dst.y = 0;
      dst.w = width;
      dst.h = height;

      gst_video_sink_center_rect (src, dst, &result, TRUE);
      gl->Viewport (result.x, result.y, result.w, result.h);
    } else {
      gl->Viewport (0, 0, width, height);
    }
#if GST_GL_HAVE_OPENGL
    if (USING_OPENGL (gl_sink->context)) {
      gl->MatrixMode (GL_PROJECTION);
      gl->LoadIdentity ();
      gluOrtho2D (0, width, 0, height);
      gl->MatrixMode (GL_MODELVIEW);
    }
#endif
  }
}
Example #13
static GstStateChangeReturn
gst_glimage_sink_change_state (GstElement * element, GstStateChange transition)
{
  GstGLImageSink *glimage_sink;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  GST_DEBUG ("change state");

  glimage_sink = GST_GLIMAGE_SINK (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_glimage_sink_stop (GST_BASE_SINK (glimage_sink));
      glimage_sink->fps_n = 0;
      glimage_sink->fps_d = 1;
      GST_VIDEO_SINK_WIDTH (glimage_sink) = 0;
      GST_VIDEO_SINK_HEIGHT (glimage_sink) = 0;
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
static gboolean
gst_gdk_pixbuf_sink_set_caps (GstBaseSink * basesink, GstCaps * caps)
{
  GstGdkPixbufSink *sink = GST_GDK_PIXBUF_SINK (basesink);
  GstVideoInfo info;
  GstVideoFormat fmt;
  gint w, h, s, par_n, par_d;

  GST_LOG_OBJECT (sink, "caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps)) {
    GST_WARNING_OBJECT (sink, "parse_caps failed");
    return FALSE;
  }

  fmt = GST_VIDEO_INFO_FORMAT (&info);
  w = GST_VIDEO_INFO_WIDTH (&info);
  h = GST_VIDEO_INFO_HEIGHT (&info);
  s = GST_VIDEO_INFO_COMP_PSTRIDE (&info, 0);
  par_n = GST_VIDEO_INFO_PAR_N (&info);
  par_d = GST_VIDEO_INFO_PAR_D (&info);

  g_assert ((fmt == GST_VIDEO_FORMAT_RGB && s == 3) ||
      (fmt == GST_VIDEO_FORMAT_RGBA && s == 4));

  GST_VIDEO_SINK_WIDTH (sink) = w;
  GST_VIDEO_SINK_HEIGHT (sink) = h;

  sink->par_n = par_n;
  sink->par_d = par_d;

  sink->has_alpha = GST_VIDEO_INFO_HAS_ALPHA (&info);

  GST_INFO_OBJECT (sink, "format             : %d", fmt);
  GST_INFO_OBJECT (sink, "width x height     : %d x %d", w, h);
  GST_INFO_OBJECT (sink, "pixel-aspect-ratio : %d/%d", par_n, par_d);

  sink->info = info;

  return TRUE;
}
static gboolean
gst_gdk_pixbuf_sink_stop (GstBaseSink * basesink)
{
  GstGdkPixbufSink *sink = GST_GDK_PIXBUF_SINK (basesink);

  GST_VIDEO_SINK_WIDTH (sink) = 0;
  GST_VIDEO_SINK_HEIGHT (sink) = 0;

  sink->par_n = 0;
  sink->par_d = 0;
  sink->has_alpha = FALSE;

  if (sink->last_pixbuf) {
    g_object_unref (sink->last_pixbuf);
    sink->last_pixbuf = NULL;
  }

  GST_LOG_OBJECT (sink, "stop");

  return TRUE;
}
static GstStateChangeReturn
gst_slvideo_change_state(GstElement * element, GstStateChange transition)
{
	GstSLVideo *slvideo;
	GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
	
	slvideo = GST_SLVIDEO (element);

	switch (transition) {
	case GST_STATE_CHANGE_NULL_TO_READY:
		break;
	case GST_STATE_CHANGE_READY_TO_PAUSED:
		break;
	case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
		break;
	default:
		break;
	}

	ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
	if (ret == GST_STATE_CHANGE_FAILURE)
		return ret;

	switch (transition) {
	case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
		break;
	case GST_STATE_CHANGE_PAUSED_TO_READY:
		slvideo->fps_n = 0;
		slvideo->fps_d = 1;
		GST_VIDEO_SINK_WIDTH(slvideo) = 0;
		GST_VIDEO_SINK_HEIGHT(slvideo) = 0;
		break;
	case GST_STATE_CHANGE_READY_TO_NULL:
		break;
	default:
		break;
	}

	return ret;
}
static gboolean
gst_gdk_pixbuf_sink_set_caps (GstBaseSink * basesink, GstCaps * caps)
{
  GstGdkPixbufSink *sink = GST_GDK_PIXBUF_SINK (basesink);
  GstVideoFormat fmt;
  gint w, h, par_n, par_d;

  GST_LOG_OBJECT (sink, "caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_format_parse_caps (caps, &fmt, &w, &h)) {
    GST_WARNING_OBJECT (sink, "parse_caps failed");
    return FALSE;
  }

  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
    GST_LOG_OBJECT (sink, "no pixel aspect ratio");
    return FALSE;
  }

  g_assert ((fmt == GST_VIDEO_FORMAT_RGB &&
          gst_video_format_get_pixel_stride (fmt, 0) == 3) ||
      (fmt == GST_VIDEO_FORMAT_RGBA &&
          gst_video_format_get_pixel_stride (fmt, 0) == 4));

  GST_VIDEO_SINK_WIDTH (sink) = w;
  GST_VIDEO_SINK_HEIGHT (sink) = h;

  sink->rowstride = gst_video_format_get_row_stride (fmt, 0, w);
  sink->has_alpha = (fmt == GST_VIDEO_FORMAT_RGBA);

  sink->par_n = par_n;
  sink->par_d = par_d;

  GST_INFO_OBJECT (sink, "format             : %d", fmt);
  GST_INFO_OBJECT (sink, "width x height     : %d x %d", w, h);
  GST_INFO_OBJECT (sink, "pixel-aspect-ratio : %d/%d", par_d, par_n);

  return TRUE;
}
Example #18
static GstFlowReturn
gst_glimage_sink_prepare (GstBaseSink * bsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  GST_TRACE ("preparing buffer:%p", buf);

  if (GST_VIDEO_SINK_WIDTH (glimage_sink) < 1 ||
      GST_VIDEO_SINK_HEIGHT (glimage_sink) < 1) {
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (!_ensure_gl_setup (glimage_sink))
    return GST_FLOW_NOT_NEGOTIATED;

  if (!gst_gl_upload_perform_with_buffer (glimage_sink->upload, buf,
          &glimage_sink->next_tex))
    goto upload_failed;

  if (glimage_sink->window_id != glimage_sink->new_window_id) {
    GstGLWindow *window = gst_gl_context_get_window (glimage_sink->context);

    glimage_sink->window_id = glimage_sink->new_window_id;
    gst_gl_window_set_window_handle (window, glimage_sink->window_id);

    gst_object_unref (window);
  }

  return GST_FLOW_OK;

upload_failed:
  {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", "Failed to upload buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
}
Example #19
static gboolean
gst_glimage_sink_redisplay (GstGLImageSink * gl_sink)
{
  GstGLWindow *window;
  gboolean alive;

  window = gst_gl_context_get_window (gl_sink->context);
  if (!window)
    return FALSE;

  if (gst_gl_window_is_running (window)) {

#if GST_GL_HAVE_GLES2
    if (USING_GLES2 (gl_sink->context)) {
      if (G_UNLIKELY (!gl_sink->redisplay_shader)) {
        gst_gl_window_send_message (window,
            GST_GL_WINDOW_CB (gst_glimage_sink_thread_init_redisplay), gl_sink);

        /* if the shader is still null it means it failed to be usable */
        if (G_UNLIKELY (!gl_sink->redisplay_shader)) {
          gst_object_unref (window);
          return FALSE;
        }
      }
    }
#endif

    /* Drawing is asynchronous: gst_gl_window_draw is not blocking
     * It means that it does not wait for stuff to be executed in other threads
     */
    gst_gl_window_draw (window, GST_VIDEO_SINK_WIDTH (gl_sink),
        GST_VIDEO_SINK_HEIGHT (gl_sink));
  }
  alive = gst_gl_window_is_running (window);
  gst_object_unref (window);

  return alive;
}
static GstStateChangeReturn
gst_glimage_sink_change_state (GstElement * element, GstStateChange transition)
{
  GstGLImageSink *glimage_sink;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  GST_DEBUG ("change state");

  glimage_sink = GST_GLIMAGE_SINK (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      g_atomic_int_set (&glimage_sink->to_quit, 0);
      if (!glimage_sink->display) {
        GstGLWindow *window;
        GError *error = NULL;

        if (!gst_gl_ensure_display (glimage_sink, &glimage_sink->display))
          return GST_STATE_CHANGE_FAILURE;

        glimage_sink->context = gst_gl_context_new (glimage_sink->display);
        window = gst_gl_context_get_window (glimage_sink->context);

        if (!glimage_sink->window_id && !glimage_sink->new_window_id)
          gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY
              (glimage_sink));

        if (glimage_sink->window_id != glimage_sink->new_window_id) {
          glimage_sink->window_id = glimage_sink->new_window_id;
          gst_gl_window_set_window_handle (window, glimage_sink->window_id);
        }

        if (!gst_gl_context_create (glimage_sink->context,
                glimage_sink->other_context, &error)) {
          GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND, ("%s",
                  error->message), (NULL));

          if (glimage_sink->display) {
            gst_object_unref (glimage_sink->display);
            glimage_sink->display = NULL;
          }
          gst_object_unref (glimage_sink->context);
          gst_object_unref (window);

          return GST_STATE_CHANGE_FAILURE;
        }

        /* setup callbacks */
        gst_gl_window_set_resize_callback (window,
            GST_GL_WINDOW_RESIZE_CB (gst_glimage_sink_on_resize),
            gst_object_ref (glimage_sink), (GDestroyNotify) gst_object_unref);
        gst_gl_window_set_draw_callback (window,
            GST_GL_WINDOW_CB (gst_glimage_sink_on_draw),
            gst_object_ref (glimage_sink), (GDestroyNotify) gst_object_unref);
        gst_gl_window_set_close_callback (window,
            GST_GL_WINDOW_CB (gst_glimage_sink_on_close),
            gst_object_ref (glimage_sink), (GDestroyNotify) gst_object_unref);

        gst_object_unref (window);
      }
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    {
      /* mark the redisplay_texture as unavailable (=0)
       * to avoid drawing
       */
      GST_GLIMAGE_SINK_LOCK (glimage_sink);
      glimage_sink->redisplay_texture = 0;
      GST_GLIMAGE_SINK_UNLOCK (glimage_sink);

      if (glimage_sink->upload) {
        gst_object_unref (glimage_sink->upload);
        glimage_sink->upload = NULL;
      }

      glimage_sink->window_id = 0;
      //but do not reset glimage_sink->new_window_id

      if (glimage_sink->pool) {
        gst_buffer_pool_set_active (glimage_sink->pool, FALSE);
        gst_object_unref (glimage_sink->pool);
        glimage_sink->pool = NULL;
      }

      GST_VIDEO_SINK_WIDTH (glimage_sink) = 1;
      GST_VIDEO_SINK_HEIGHT (glimage_sink) = 1;
      if (glimage_sink->context) {
        GstGLWindow *window = gst_gl_context_get_window (glimage_sink->context);

        gst_gl_window_send_message (window,
            GST_GL_WINDOW_CB (gst_glimage_sink_cleanup_glthread), glimage_sink);

        gst_gl_window_set_resize_callback (window, NULL, NULL, NULL);
        gst_gl_window_set_draw_callback (window, NULL, NULL, NULL);
        gst_gl_window_set_close_callback (window, NULL, NULL, NULL);

        gst_object_unref (window);
        gst_object_unref (glimage_sink->context);
        glimage_sink->context = NULL;
        gst_object_unref (glimage_sink->display);
        glimage_sink->display = NULL;
      }
      break;
    }
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
Example #21
static gboolean
gst_vdp_sink_setcaps (GstBaseSink * bsink, GstCaps * caps)
{
  VdpSink *vdp_sink;
  GstCaps *allowed_caps;
  gboolean ret = TRUE;
  GstStructure *structure;
  GstCaps *intersection;
  gint new_width, new_height;
  const GValue *fps;

  vdp_sink = GST_VDP_SINK (bsink);

  GST_OBJECT_LOCK (vdp_sink);
  if (!vdp_sink->device) {
    GST_OBJECT_UNLOCK (vdp_sink);
    return FALSE;
  }
  GST_OBJECT_UNLOCK (vdp_sink);

  allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (bsink));
  GST_DEBUG_OBJECT (vdp_sink,
      "sinkconnect possible caps %" GST_PTR_FORMAT " with given caps %"
      GST_PTR_FORMAT, allowed_caps, caps);

  /* We intersect those caps with our template to make sure they are correct */
  intersection = gst_caps_intersect (allowed_caps, caps);
  gst_caps_unref (allowed_caps);

  GST_DEBUG_OBJECT (vdp_sink, "intersection returned %" GST_PTR_FORMAT,
      intersection);
  if (gst_caps_is_empty (intersection)) {
    gst_caps_unref (intersection);
    return FALSE;
  }

  gst_caps_unref (intersection);

  structure = gst_caps_get_structure (caps, 0);

  ret &= gst_structure_get_int (structure, "width", &new_width);
  ret &= gst_structure_get_int (structure, "height", &new_height);
  fps = gst_structure_get_value (structure, "framerate");
  ret &= (fps != NULL);
  if (!ret)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (vdp_sink) = new_width;
  GST_VIDEO_SINK_HEIGHT (vdp_sink) = new_height;
  vdp_sink->fps_n = gst_value_get_fraction_numerator (fps);
  vdp_sink->fps_d = gst_value_get_fraction_denominator (fps);

  gst_vdp_buffer_pool_set_caps (vdp_sink->bpool, caps);

  /* Notify application to set xwindow id now */
  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    g_mutex_unlock (vdp_sink->flow_lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (vdp_sink));
  } else {
    g_mutex_unlock (vdp_sink->flow_lock);
  }

  /* Creating our window and our image */
  if (GST_VIDEO_SINK_WIDTH (vdp_sink) <= 0
      || GST_VIDEO_SINK_HEIGHT (vdp_sink) <= 0) {
    GST_ELEMENT_ERROR (vdp_sink, CORE, NEGOTIATION, (NULL),
        ("Invalid image size."));
    return FALSE;
  }

  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    vdp_sink->window = gst_vdp_sink_window_new (vdp_sink,
        GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink));
  }
  g_mutex_unlock (vdp_sink->flow_lock);

  return TRUE;
}
Example #22
static void
gst_vdp_sink_set_xwindow_id (GstXOverlay * overlay, XID xwindow_id)
{
  VdpSink *vdp_sink = GST_VDP_SINK (overlay);
  GstVdpWindow *window = NULL;
  XWindowAttributes attr;

  /* We acquire the stream lock while setting this window in the element.
     We are basically cleaning tons of stuff replacing the old window, putting
     images while we do that would surely crash */
  g_mutex_lock (vdp_sink->flow_lock);

  /* If we already use that window return */
  if (vdp_sink->window && (xwindow_id == vdp_sink->window->win)) {
    g_mutex_unlock (vdp_sink->flow_lock);
    return;
  }

  /* If the element has not initialized the X11 context try to do so */
  if (!gst_vdp_sink_open_device (vdp_sink)) {
    g_mutex_unlock (vdp_sink->flow_lock);
    /* we have thrown a GST_ELEMENT_ERROR now */
    return;
  }

  /* If a window is there already we destroy it */
  if (vdp_sink->window) {
    gst_vdp_sink_window_destroy (vdp_sink, vdp_sink->window);
    vdp_sink->window = NULL;
  }

  /* If the xid is 0 we go back to an internal window */
  if (xwindow_id == 0) {
    /* If no width/height is set, caps nego has not happened yet and the
       window will be created during caps nego instead */
    if (GST_VIDEO_SINK_WIDTH (vdp_sink) && GST_VIDEO_SINK_HEIGHT (vdp_sink)) {
      window = gst_vdp_sink_window_new (vdp_sink,
          GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink));
    }
  } else {
    window = g_new0 (GstVdpWindow, 1);

    window->win = xwindow_id;

    /* We get window geometry, set the event we want to receive,
       and create a GC */
    g_mutex_lock (vdp_sink->x_lock);
    XGetWindowAttributes (vdp_sink->device->display, window->win, &attr);
    window->width = attr.width;
    window->height = attr.height;
    window->internal = FALSE;
    if (vdp_sink->handle_events) {
      XSelectInput (vdp_sink->device->display, window->win, ExposureMask |
          StructureNotifyMask | PointerMotionMask | KeyPressMask |
          KeyReleaseMask);
    }

    g_mutex_unlock (vdp_sink->x_lock);
  }

  if (window)
    vdp_sink->window = window;

  g_mutex_unlock (vdp_sink->flow_lock);
}
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoInfo vinfo;
  GstStructure *structure;
  GstBufferPool *newpool, *oldpool;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  ok = gst_video_info_from_caps (&vinfo, caps);
  if (!ok)
    return FALSE;

  width = GST_VIDEO_INFO_WIDTH (&vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&vinfo);

  if (glimage_sink->tex_id)
    gst_gl_context_del_texture (glimage_sink->context, &glimage_sink->tex_id);
  //FIXME: this texture seems to be never deleted when going to STATE_NULL
  gst_gl_context_gen_texture (glimage_sink->context, &glimage_sink->tex_id,
      GST_VIDEO_INFO_FORMAT (&vinfo), width, height);

  par_n = GST_VIDEO_INFO_PAR_N (&vinfo);
  par_d = GST_VIDEO_INFO_PAR_D (&vinfo);

  if (!par_n)
    par_n = 1;

  /* get display's PAR */
  if (glimage_sink->par_n != 0 && glimage_sink->par_d != 0) {
    display_par_n = glimage_sink->par_n;
    display_par_d = glimage_sink->par_d;
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  GST_TRACE ("PAR: %u/%u DAR:%u/%u", par_n, par_d, display_par_n,
      display_par_d);

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    GST_VIDEO_SINK_WIDTH (glimage_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
    GST_VIDEO_SINK_HEIGHT (glimage_sink) = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (glimage_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  }
  GST_DEBUG ("scaling to %dx%d", GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  glimage_sink->info = vinfo;

  newpool = gst_gl_buffer_pool_new (glimage_sink->context);
  structure = gst_buffer_pool_get_config (newpool);
  gst_buffer_pool_config_set_params (structure, caps, vinfo.size, 2, 0);
  gst_buffer_pool_set_config (newpool, structure);

  oldpool = glimage_sink->pool;
  /* we don't activate the pool yet, this will be done by downstream after it
   * has configured the pool. If downstream does not want our pool we will
   * activate it when we render into it */
  glimage_sink->pool = newpool;

  /* unref the old pool */
  if (oldpool) {
    /* we don't deactivate, some elements might still be using it, it will
     * be deactivated when the last ref is gone */
    gst_object_unref (oldpool);
  }

  return TRUE;
}
Example #24
/* Process pending events. Call with ->lock held */
static void
gst_sdlv_process_events (GstSDLVideoSink * sdlvideosink)
{
  SDL_Event event;
  int numevents;
  char *keysym = NULL;

  do {
    SDL_PumpEvents ();
    numevents = SDL_PeepEvents (&event, 1, SDL_GETEVENT,
        SDL_KEYDOWNMASK | SDL_KEYUPMASK |
        SDL_MOUSEMOTIONMASK | SDL_MOUSEBUTTONDOWNMASK |
        SDL_MOUSEBUTTONUPMASK | SDL_QUITMASK | SDL_VIDEORESIZEMASK);

    if (numevents > 0 && (event.type == SDL_KEYUP || event.type == SDL_KEYDOWN)) {
      keysym = SDL_GetKeyName (event.key.keysym.sym);
    }

    if (numevents > 0) {
      g_mutex_unlock (sdlvideosink->lock);
      switch (event.type) {
        case SDL_MOUSEMOTION:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-move", 0, event.motion.x, event.motion.y);
          break;
        case SDL_MOUSEBUTTONDOWN:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-press",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_MOUSEBUTTONUP:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-release",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_KEYUP:
          GST_DEBUG ("key press event %s !",
              SDL_GetKeyName (event.key.keysym.sym));
          gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
              "key-release", keysym);
          break;
        case SDL_KEYDOWN:
          if (SDLK_ESCAPE != event.key.keysym.sym) {
            GST_DEBUG ("key press event %s !",
                SDL_GetKeyName (event.key.keysym.sym));
            gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
                "key-press", keysym);
            break;
          } else {
            /* fall through */
          }
        case SDL_QUIT:
          sdlvideosink->running = FALSE;
          GST_ELEMENT_ERROR (sdlvideosink, RESOURCE, OPEN_WRITE,
              ("Video output device is gone."),
              ("We were running fullscreen and user "
                  "pressed the ESC key, stopping playback."));
          break;
        case SDL_VIDEORESIZE:
          /* create a SDL window of the size requested by the user */
          g_mutex_lock (sdlvideosink->lock);
          GST_VIDEO_SINK_WIDTH (sdlvideosink) = event.resize.w;
          GST_VIDEO_SINK_HEIGHT (sdlvideosink) = event.resize.h;
          gst_sdlvideosink_create (sdlvideosink);
          g_mutex_unlock (sdlvideosink->lock);
          break;
      }
      g_mutex_lock (sdlvideosink->lock);
    }
  } while (numevents > 0);
}
static GstFlowReturn
gst_glimage_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;
  guint tex_id;

  GST_TRACE ("rendering buffer:%p", buf);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  if (GST_VIDEO_SINK_WIDTH (glimage_sink) < 1 ||
      GST_VIDEO_SINK_HEIGHT (glimage_sink) < 1) {
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (!_ensure_gl_setup (glimage_sink))
    return GST_FLOW_NOT_NEGOTIATED;

  if (!gst_gl_upload_perform_with_buffer (glimage_sink->upload, buf, &tex_id))
    goto upload_failed;

  if (glimage_sink->window_id != glimage_sink->new_window_id) {
    GstGLWindow *window = gst_gl_context_get_window (glimage_sink->context);

    glimage_sink->window_id = glimage_sink->new_window_id;
    gst_gl_window_set_window_handle (window, glimage_sink->window_id);

    gst_object_unref (window);
  }

  GST_TRACE ("redisplay texture:%u of size:%ux%u, window size:%ux%u", tex_id,
      GST_VIDEO_INFO_WIDTH (&glimage_sink->info),
      GST_VIDEO_INFO_HEIGHT (&glimage_sink->info),
      GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  /* Avoid releasing the texture while drawing */
  GST_GLIMAGE_SINK_LOCK (glimage_sink);
  glimage_sink->redisplay_texture = tex_id;
  GST_GLIMAGE_SINK_UNLOCK (glimage_sink);

  /* Ask the underlying window to redraw its content */
  if (!gst_glimage_sink_redisplay (glimage_sink))
    goto redisplay_failed;

  GST_TRACE ("post redisplay");

  if (g_atomic_int_get (&glimage_sink->to_quit) != 0) {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    gst_gl_upload_release_buffer (glimage_sink->upload);
    return GST_FLOW_ERROR;
  }

  gst_gl_upload_release_buffer (glimage_sink->upload);
  return GST_FLOW_OK;

/* ERRORS */
redisplay_failed:
  {
    gst_gl_upload_release_buffer (glimage_sink->upload);
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    return GST_FLOW_ERROR;
  }

upload_failed:
  {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", "Failed to upload format"), (NULL));
    return GST_FLOW_ERROR;
  }
}
Example #26
static gboolean
gst_eglglessink_configure_caps (GstEglGlesSink * eglglessink, GstCaps * caps)
{
  gboolean ret = TRUE;
  gint width, height;
  int par_n, par_d;
  guintptr used_window = 0;

  if (!(ret = gst_video_format_parse_caps (caps, &eglglessink->format, &width,
              &height))) {
    GST_ERROR_OBJECT (eglglessink, "Got weird and/or incomplete caps");
    goto HANDLE_ERROR;
  }

  if (!(ret = gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d))) {
    par_n = 1;
    par_d = 1;
    GST_WARNING_OBJECT (eglglessink,
        "Can't parse PAR from caps. Using default: 1");
  }

  eglglessink->size_changed = (GST_VIDEO_SINK_WIDTH (eglglessink) != width ||
      GST_VIDEO_SINK_HEIGHT (eglglessink) != height ||
      eglglessink->par_n != par_n || eglglessink->par_d != par_d);

  eglglessink->par_n = par_n;
  eglglessink->par_d = par_d;
  GST_VIDEO_SINK_WIDTH (eglglessink) = width;
  GST_VIDEO_SINK_HEIGHT (eglglessink) = height;

  if (eglglessink->configured_caps) {
    GST_DEBUG_OBJECT (eglglessink, "Caps were already set");
    if (gst_caps_can_intersect (caps, eglglessink->configured_caps)) {
      GST_DEBUG_OBJECT (eglglessink, "Caps are compatible anyway");
      goto SUCCEED;
    }

    GST_DEBUG_OBJECT (eglglessink, "Caps are not compatible, reconfiguring");

    /* EGL/GLES cleanup */
    gst_egl_adaptation_cleanup (eglglessink->egl_context);

    gst_caps_unref (eglglessink->configured_caps);
    eglglessink->configured_caps = NULL;
  }

  if (!gst_egl_adaptation_choose_config (eglglessink->egl_context)) {
    GST_ERROR_OBJECT (eglglessink, "Couldn't choose EGL config");
    goto HANDLE_ERROR;
  }

  gst_caps_replace (&eglglessink->configured_caps, caps);

  /* By now the application should have set a window
   * if it meant to do so
   */
  GST_OBJECT_LOCK (eglglessink);
  if (!eglglessink->have_window) {

    GST_INFO_OBJECT (eglglessink,
        "No window. Will attempt internal window creation");
    if (!gst_eglglessink_create_window (eglglessink, width, height)) {
      GST_ERROR_OBJECT (eglglessink, "Internal window creation failed!");
      GST_OBJECT_UNLOCK (eglglessink);
      goto HANDLE_ERROR;
    }
    eglglessink->using_own_window = TRUE;
    gst_egl_adaptation_update_used_window (eglglessink->egl_context);
    eglglessink->have_window = TRUE;
  }
  used_window = gst_egl_adaptation_get_window (eglglessink->egl_context);
  GST_OBJECT_UNLOCK (eglglessink);
  gst_x_overlay_got_window_handle (GST_X_OVERLAY (eglglessink),
      (guintptr) used_window);

  if (!eglglessink->egl_context->have_surface) {
    if (!gst_egl_adaptation_init_egl_surface (eglglessink->egl_context,
            eglglessink->format)) {
      GST_ERROR_OBJECT (eglglessink, "Couldn't init EGL surface from window");
      goto HANDLE_ERROR;
    }
  }

SUCCEED:
  GST_INFO_OBJECT (eglglessink, "Configured caps successfully");
  return TRUE;

HANDLE_ERROR:
  GST_ERROR_OBJECT (eglglessink, "Configuring caps failed");
  return FALSE;
}
Example #27
/* Must be called with the sdl lock held */
static gboolean
gst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink)
{
  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)
    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;
  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)
    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;

  gst_sdlvideosink_destroy (sdlvideosink);

  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {
    g_mutex_unlock (sdlvideosink->lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));
    g_mutex_lock (sdlvideosink->lock);
  }

  /* create a SDL window of the size requested by the user */
  if (sdlvideosink->full_screen) {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_SWSURFACE | SDL_FULLSCREEN);
  } else {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0, SDL_HWSURFACE | SDL_RESIZABLE);
  }
  if (sdlvideosink->screen == NULL)
    goto no_screen;

  /* create a new YUV overlay */
  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,
      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);
  if (sdlvideosink->overlay == NULL)
    goto no_overlay;


  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d \'%"
      GST_FOURCC_FORMAT "\' YUV overlay", GST_VIDEO_SINK_WIDTH (sdlvideosink),
      GST_VIDEO_SINK_HEIGHT (sdlvideosink),
      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,
      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));

  sdlvideosink->rect.x = 0;
  sdlvideosink->rect.y = 0;
  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);

  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */

  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",
      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));

  return TRUE;

  /* ERRORS */
no_screen:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),
            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));
    return FALSE;
  }
no_overlay:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't create SDL YUV overlay (%dx%d \'%" GST_FOURCC_FORMAT
            "\'): %s", sdlvideosink->width, sdlvideosink->height,
            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));
    return FALSE;
  }
}
Example #28
static gboolean
gst_eglglessink_fill_texture (GstEglGlesSink * eglglessink, GstBuffer * buf)
{
  gint w, h;

  w = GST_VIDEO_SINK_WIDTH (eglglessink);
  h = GST_VIDEO_SINK_HEIGHT (eglglessink);

  GST_DEBUG_OBJECT (eglglessink,
      "Got good buffer %p. Sink geometry is %dx%d size %d", buf, w, h,
      buf ? GST_BUFFER_SIZE (buf) : -1);

  switch (eglglessink->format) {
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
      glActiveTexture (GL_TEXTURE0);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[0]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
          GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf));
      break;
    case GST_VIDEO_FORMAT_AYUV:
      glActiveTexture (GL_TEXTURE0);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[0]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
          GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf));
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:{
      gint coffset, cw, ch;

      coffset =
          gst_video_format_get_component_offset (eglglessink->format, 0, w, h);
      cw = gst_video_format_get_component_width (eglglessink->format, 0, w);
      ch = gst_video_format_get_component_height (eglglessink->format, 0, h);
      glActiveTexture (GL_TEXTURE0);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[0]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE, cw, ch, 0,
          GL_LUMINANCE, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf) + coffset);
      coffset =
          gst_video_format_get_component_offset (eglglessink->format, 1, w, h);
      cw = gst_video_format_get_component_width (eglglessink->format, 1, w);
      ch = gst_video_format_get_component_height (eglglessink->format, 1, h);
      glActiveTexture (GL_TEXTURE1);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[1]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE, cw, ch, 0,
          GL_LUMINANCE, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf) + coffset);
      coffset =
          gst_video_format_get_component_offset (eglglessink->format, 2, w, h);
      cw = gst_video_format_get_component_width (eglglessink->format, 2, w);
      ch = gst_video_format_get_component_height (eglglessink->format, 2, h);
      glActiveTexture (GL_TEXTURE2);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[2]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE, cw, ch, 0,
          GL_LUMINANCE, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf) + coffset);
      break;
    }
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_UYVY:
      glActiveTexture (GL_TEXTURE0);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[0]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, w, h, 0,
          GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf));
      glActiveTexture (GL_TEXTURE1);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[1]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, GST_ROUND_UP_2 (w) / 2,
          h, 0, GL_RGBA, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf));
      break;
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:{
      gint coffset, cw, ch;

      coffset =
          gst_video_format_get_component_offset (eglglessink->format, 0, w, h);
      cw = gst_video_format_get_component_width (eglglessink->format, 0, w);
      ch = gst_video_format_get_component_height (eglglessink->format, 0, h);
      glActiveTexture (GL_TEXTURE0);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[0]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE, cw, ch, 0,
          GL_LUMINANCE, GL_UNSIGNED_BYTE, GST_BUFFER_DATA (buf) + coffset);

      coffset =
          gst_video_format_get_component_offset (eglglessink->format,
          (eglglessink->format == GST_VIDEO_FORMAT_NV12 ? 1 : 2), w, h);
      cw = gst_video_format_get_component_width (eglglessink->format, 1, w);
      ch = gst_video_format_get_component_height (eglglessink->format, 1, h);
      glActiveTexture (GL_TEXTURE1);
      glBindTexture (GL_TEXTURE_2D, eglglessink->egl_context->texture[1]);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, cw, ch, 0,
          GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE,
          GST_BUFFER_DATA (buf) + coffset);
      break;
    }
    default:
      g_assert_not_reached ();
      break;
  }

  if (got_gl_error ("glTexImage2D"))
    goto HANDLE_ERROR;

  return TRUE;

HANDLE_ERROR:
  return FALSE;
}
/* this function handles the link with other elements */
static gboolean
gst_slvideo_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
	GstSLVideo *filter;
	GstStructure *structure;
	GstCaps *intersection;
	
	GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);
	
	filter = GST_SLVIDEO(bsink);
	
	intersection = gst_caps_intersect (filter->caps, caps);
	if (gst_caps_is_empty (intersection))
	{
		// no overlap between our caps and requested caps
		return FALSE;
	}
	gst_caps_unref(intersection);
	
	int width = 0;
	int height = 0;
	gboolean ret;
	const GValue *fps;
	const GValue *par;
	structure = gst_caps_get_structure (caps, 0);
	ret = gst_structure_get_int (structure, "width", &width);
	ret = ret && gst_structure_get_int (structure, "height", &height);
	fps = gst_structure_get_value (structure, "framerate");
	ret = ret && (fps != NULL);
	par = gst_structure_get_value (structure, "pixel-aspect-ratio");
	if (!ret)
		return FALSE;

	filter->width = width;
	filter->height = height;
	filter->fps_n = gst_value_get_fraction_numerator(fps);
	filter->fps_d = gst_value_get_fraction_denominator(fps);
	if (par)
	{
		filter->par_n = gst_value_get_fraction_numerator(par);
		filter->par_d = gst_value_get_fraction_denominator(par);
	}
	else
	{
		filter->par_n = 1;
		filter->par_d = 1;
	}
	GST_VIDEO_SINK_WIDTH(filter) = width;
	GST_VIDEO_SINK_HEIGHT(filter) = height;
	
	filter->format = SLV_PF_UNKNOWN;
	if (0 == strcmp(gst_structure_get_name(structure),
			"video/x-raw-rgb"))
	{
		int red_mask;
		int green_mask;
		int blue_mask;
		gst_structure_get_int(structure, "red_mask", &red_mask);
		gst_structure_get_int(structure, "green_mask", &green_mask);
		gst_structure_get_int(structure, "blue_mask", &blue_mask);
		if ((unsigned int)red_mask   == 0xFF000000 &&
		    (unsigned int)green_mask == 0x00FF0000 &&
		    (unsigned int)blue_mask  == 0x0000FF00)
		{
			filter->format = SLV_PF_RGBX;
			//fprintf(stderr, "\n\nPIXEL FORMAT RGB\n\n");
		} else if ((unsigned int)red_mask   == 0x0000FF00 &&
			   (unsigned int)green_mask == 0x00FF0000 &&
			   (unsigned int)blue_mask  == 0xFF000000)
		{
			filter->format = SLV_PF_BGRX;
			//fprintf(stderr, "\n\nPIXEL FORMAT BGR\n\n");
		}
	}
	
	return TRUE;
}
Example #30
static GstFlowReturn
gst_eglglessink_render (GstEglGlesSink * eglglessink)
{
  guint dar_n, dar_d;
  gint i;
  gint w, h;

  w = GST_VIDEO_SINK_WIDTH (eglglessink);
  h = GST_VIDEO_SINK_HEIGHT (eglglessink);

  /* If no one has set a display rectangle on us initialize
   * a sane default. According to the docs on the xOverlay
   * interface we are supposed to fill the overlay 100%. We
   * do this trying to take PAR/DAR into account unless the
   * calling party explicitly asks us not to by setting
   * force_aspect_ratio to FALSE.
   */
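  /* illustration with hypothetical numbers, assuming a 1:1 display
   * pixel-aspect-ratio: 720x576 material with a 16:15 PAR maps to a 768x576
   * frame, which centered into a 1920x1080 render region becomes a
   * 1440x1080 display region with 240-pixel black borders on each side */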
  if (gst_egl_adaptation_update_surface_dimensions
      (eglglessink->egl_context) || eglglessink->render_region_changed
      || !eglglessink->display_region.w || !eglglessink->display_region.h
      || eglglessink->size_changed) {
    GST_OBJECT_LOCK (eglglessink);

    if (!eglglessink->render_region_user) {
      eglglessink->render_region.x = 0;
      eglglessink->render_region.y = 0;
      eglglessink->render_region.w = eglglessink->egl_context->surface_width;
      eglglessink->render_region.h = eglglessink->egl_context->surface_height;
    }
    eglglessink->render_region_changed = FALSE;
    eglglessink->size_changed = FALSE;

    if (!eglglessink->force_aspect_ratio) {
      eglglessink->display_region.x = 0;
      eglglessink->display_region.y = 0;
      eglglessink->display_region.w = eglglessink->render_region.w;
      eglglessink->display_region.h = eglglessink->render_region.h;
    } else {
      GstVideoRectangle frame;

      frame.x = 0;
      frame.y = 0;

      if (!gst_video_calculate_display_ratio (&dar_n, &dar_d,
              w, h,
              eglglessink->par_n,
              eglglessink->par_d,
              eglglessink->egl_context->pixel_aspect_ratio_n,
              eglglessink->egl_context->pixel_aspect_ratio_d)) {
        GST_WARNING_OBJECT (eglglessink, "Could not compute resulting DAR");
        frame.w = w;
        frame.h = h;
      } else {
        /* Find a suitable matching new size according to dar & par.
         * The rationale for preferring to leave the height untouched
         * comes from interlacing considerations.
         * XXX: Move this to gstutils?
         */
        if (h % dar_d == 0) {
          frame.w = gst_util_uint64_scale_int (h, dar_n, dar_d);
          frame.h = h;
        } else if (w % dar_n == 0) {
          frame.h = gst_util_uint64_scale_int (w, dar_d, dar_n);
          frame.w = w;
        } else {
          /* Neither width nor height can be precisely scaled.
           * Prefer to leave height untouched. See comment above.
           */
          frame.w = gst_util_uint64_scale_int (h, dar_n, dar_d);
          frame.h = h;
        }
      }

      gst_video_sink_center_rect (frame, eglglessink->render_region,
          &eglglessink->display_region, TRUE);
    }

    glViewport (eglglessink->render_region.x,
        eglglessink->egl_context->surface_height -
        eglglessink->render_region.y -
        eglglessink->render_region.h,
        eglglessink->render_region.w, eglglessink->render_region.h);

    /* Clear the surface once if its content is preserved */
    if (eglglessink->egl_context->buffer_preserved) {
      glClearColor (0.0, 0.0, 0.0, 1.0);
      glClear (GL_COLOR_BUFFER_BIT);
    }

    if (!gst_eglglessink_setup_vbo (eglglessink, FALSE)) {
      GST_OBJECT_UNLOCK (eglglessink);
      GST_ERROR_OBJECT (eglglessink, "VBO setup failed");
      goto HANDLE_ERROR;
    }
    GST_OBJECT_UNLOCK (eglglessink);
  }

  if (!eglglessink->egl_context->buffer_preserved) {
    /* Draw black borders */
    GST_DEBUG_OBJECT (eglglessink, "Drawing black border 1");
    glUseProgram (eglglessink->egl_context->glslprogram[1]);

    glVertexAttribPointer (eglglessink->egl_context->position_loc[1], 3,
        GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (4 * sizeof (coord5)));
    if (got_gl_error ("glVertexAttribPointer"))
      goto HANDLE_ERROR;

    glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
    if (got_gl_error ("glDrawElements"))
      goto HANDLE_ERROR;

    GST_DEBUG_OBJECT (eglglessink, "Drawing black border 2");

    glVertexAttribPointer (eglglessink->egl_context->position_loc[1], 3,
        GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (8 * sizeof (coord5)));
    if (got_gl_error ("glVertexAttribPointer"))
      goto HANDLE_ERROR;

    glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
    if (got_gl_error ("glDrawElements"))
      goto HANDLE_ERROR;
  }

  /* Draw video frame */
  GST_DEBUG_OBJECT (eglglessink, "Drawing video frame");
  glUseProgram (eglglessink->egl_context->glslprogram[0]);

  for (i = 0; i < eglglessink->egl_context->n_textures; i++) {
    glUniform1i (eglglessink->egl_context->tex_loc[0][i], i);
    if (got_gl_error ("glUniform1i"))
      goto HANDLE_ERROR;
  }

  glVertexAttribPointer (eglglessink->egl_context->position_loc[0], 3,
      GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (0 * sizeof (coord5)));
  if (got_gl_error ("glVertexAttribPointer"))
    goto HANDLE_ERROR;

  glVertexAttribPointer (eglglessink->egl_context->texpos_loc[0], 2, GL_FLOAT,
      GL_FALSE, sizeof (coord5), (gpointer) (3 * sizeof (gfloat)));
  if (got_gl_error ("glVertexAttribPointer"))
    goto HANDLE_ERROR;

  glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
  if (got_gl_error ("glDrawElements"))
    goto HANDLE_ERROR;

  if (!gst_egl_adaptation_swap_buffers (eglglessink->egl_context)) {
    goto HANDLE_ERROR;
  }

  GST_DEBUG_OBJECT (eglglessink, "Succesfully rendered 1 frame");
  return GST_FLOW_OK;

HANDLE_ERROR:
  GST_ERROR_OBJECT (eglglessink, "Rendering disabled for this frame");

  return GST_FLOW_ERROR;
}