static gboolean
empathy_video_widget_draw (GtkWidget *widget,
    cairo_t *cr)
{
  EmpathyVideoWidget *self = EMPATHY_VIDEO_WIDGET (widget);
  EmpathyVideoWidgetPriv *priv = GET_PRIV (self);
  GtkAllocation allocation;

  if (priv->overlay == NULL)
    {
      /* No overlay sink yet: just draw the themed frame ourselves. */
      gtk_widget_get_allocation (widget, &allocation);

      gtk_render_frame (gtk_widget_get_style_context (widget), cr,
          0, 0, allocation.width, allocation.height);
      return TRUE;
    }

  /* Hand our X window to the overlay sink and ask it to repaint. */
  gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (priv->overlay),
    GDK_WINDOW_XID (gtk_widget_get_window (widget)));

  gst_x_overlay_expose (GST_X_OVERLAY (priv->overlay));

  return TRUE;
}
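For context, a draw handler like the one above is usually installed on a widget that owns its own native X window, with double buffering disabled so the sink can paint directly. A minimal sketch of that wiring, assuming a hypothetical handler on_area_draw and an already-created video_sink element (Empathy itself installs the handler as the GtkWidgetClass::draw vfunc instead):

/* Sketch only: on_area_draw and video_sink are placeholders. */
GtkWidget *area = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (area, FALSE);  /* let the sink paint directly */
g_signal_connect (area, "draw", G_CALLBACK (on_area_draw), video_sink);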
Example no. 2
G_MODULE_EXPORT gboolean
preview_draw_cb(
    GtkWidget *widget,
    cairo_t *cr,
    signal_user_data_t *ud)
{
#if defined(_ENABLE_GST)
#if GST_CHECK_VERSION(1, 0, 0)
    if (ud->preview->live_enabled && ud->preview->state == PREVIEW_STATE_LIVE)
    {
        if (GST_STATE(ud->preview->play) >= GST_STATE_PAUSED)
        {
            GstElement *vsink;
            GstVideoOverlay *vover;

            g_object_get(ud->preview->play, "video-sink", &vsink, NULL);
            if (GST_IS_BIN(vsink))
                vover = GST_VIDEO_OVERLAY(gst_bin_get_by_interface(
                                    GST_BIN(vsink), GST_TYPE_VIDEO_OVERLAY));
            else
                vover = GST_VIDEO_OVERLAY(vsink);
            gst_video_overlay_expose(vover);
            // For some reason, the exposed region doesn't always get
            // cleaned up here, but a delayed expose from an idle callback
            // (delayed_expose_cb) takes care of it.
            g_idle_add((GSourceFunc)delayed_expose_cb, ud);
        }
        return FALSE;
    }
#else
    if (ud->preview->live_enabled && ud->preview->state == PREVIEW_STATE_LIVE)
    {
        if (GST_STATE(ud->preview->play) >= GST_STATE_PAUSED)
        {
            GstElement *vsink;
            GstXOverlay *xover;

            g_object_get(ud->preview->play, "video-sink", &vsink, NULL);
            if (GST_IS_BIN(vsink))
                xover = GST_X_OVERLAY(gst_bin_get_by_interface(
                                        GST_BIN(vsink), GST_TYPE_X_OVERLAY));
            else
                xover = GST_X_OVERLAY(vsink);
            gst_x_overlay_expose(xover);
            // For some reason, the exposed region doesn't always get
            // cleaned up here. But a delayed gst_x_overlay_expose()
            // takes care of it.
            g_idle_add((GSourceFunc)delayed_expose_cb, ud);
        }
        return FALSE;
    }
#endif
#endif

    if (ud->preview->pix != NULL)
    {
        _draw_pixbuf(cr, ud->preview->pix);
    }
    return FALSE;
}
Example no. 3
static gboolean
handle_draw_cb (GtkWidget * widget, cairo_t * cr, gpointer user_data)
{
  GstVideoRectangle *r = &anim_state.rect;
  GtkStyle *style;
  int width, height;

  width = gtk_widget_get_allocated_width (widget);
  height = gtk_widget_get_allocated_height (widget);

  style = gtk_widget_get_style (widget);

  gdk_cairo_set_source_color (cr, &style->bg[GTK_STATE_NORMAL]);

  /* we should only redraw outside of the video rect! */
  cairo_rectangle (cr, 0, 0, r->x, height);
  cairo_rectangle (cr, r->x + r->w, 0, width - (r->x + r->w), height);

  cairo_rectangle (cr, 0, 0, width, r->y);
  cairo_rectangle (cr, 0, r->y + r->h, width, height - (r->y + r->h));

  cairo_fill (cr);

  if (verbose) {
    g_print ("draw(%p)\n", widget);
  }
  gst_x_overlay_expose (anim_state.overlay);
  return FALSE;
}
Example no. 4
void QGstreamerGLTextureRenderer::repaintOverlay()
{
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
        //don't call gst_x_overlay_expose if the sink is in null state
        GstState state = GST_STATE_NULL;
        GstStateChangeReturn res = gst_element_get_state(m_videoSink, &state, NULL, 1000000);
        if (res != GST_STATE_CHANGE_FAILURE && state != GST_STATE_NULL) {
            gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
        }
    }
}
Example no. 5
void Video_RX::run(){

        emit displayed(video_sink,2);
        gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
        gst_x_overlay_expose(GST_X_OVERLAY(video_sink));
        new_video_rx_stream=true;

}
P_INVOKE void
bp_video_window_expose (BansheePlayer *player, GdkWindow *window, gboolean direct)
{
    g_return_if_fail (IS_BANSHEE_PLAYER (player));
    
    if (direct && player->xoverlay != NULL && GST_IS_X_OVERLAY (player->xoverlay)) {
        gst_x_overlay_expose (player->xoverlay);
        return;
    }

    if (player->xoverlay == NULL && !bp_video_find_xoverlay (player)) {
        return;
    }
    
    gst_object_ref (player->xoverlay);

    gst_x_overlay_set_xwindow_id (player->xoverlay, player->video_window_xid);
    gst_x_overlay_expose (player->xoverlay);

    gst_object_unref (player->xoverlay);
}
Example no. 7
void VideoWidget::WindowExposed()
{
    if (video_overlay_ && GST_IS_X_OVERLAY(video_overlay_))
    {
        // Ask the overlay sink to repaint its window
        qDebug() << name_ << " >> WindowExposed() called";
#ifdef Q_WS_X11
        QApplication::syncX();
#endif
        gst_x_overlay_expose(GST_X_OVERLAY(video_overlay_));
    }
}
static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  ANativeWindow *new_native_window = ANativeWindow_fromSurface(env, surface);
  GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);

  if (data->native_window) {
    ANativeWindow_release (data->native_window);
    if (data->native_window == new_native_window) {
      GST_DEBUG ("New native window is the same as the previous one", data->native_window);
      if (data->pipeline) {
        gst_x_overlay_expose(GST_X_OVERLAY (data->pipeline));
        gst_x_overlay_expose(GST_X_OVERLAY (data->pipeline));
      }
      return;
    } else {
      GST_DEBUG ("Released previous native window %p", data->native_window);
      data->initialized = FALSE;
    }
  }
  data->native_window = new_native_window;

  check_initialization_complete (data);
}
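check_initialization_complete() is not shown above. A minimal sketch of what it typically does in this 0.10-era Android pattern, assuming CustomData also carries main_loop and initialized fields (as in the upstream GStreamer Android tutorials this code resembles):

/* Sketch only: field names beyond pipeline/native_window are assumptions. */
static void check_initialization_complete (CustomData *data) {
  if (!data->initialized && data->native_window && data->main_loop) {
    GST_DEBUG ("Initialization complete, handing window %p to the sink",
        data->native_window);
    /* Tell the overlay-capable sink which native window to render into */
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline),
        (guintptr) data->native_window);
    data->initialized = TRUE;
  }
}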
Example no. 9
static void
empathy_video_widget_element_added_cb (FsElementAddedNotifier *notifier,
  GstBin *bin, GstElement *element, EmpathyVideoWidget *self)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (self);

  /* We assume the overlay is the sink */
  g_mutex_lock (priv->lock);
  if (priv->overlay == NULL && GST_IS_X_OVERLAY (element))
    {
      priv->overlay = element;
      g_object_add_weak_pointer (G_OBJECT (element),
        (gpointer) &priv->overlay);
      empathy_video_widget_element_set_sink_properties_unlocked (self);
      gst_x_overlay_expose (GST_X_OVERLAY (priv->overlay));
    }
  g_mutex_unlock (priv->lock);
}
Example no. 10
static void
empathy_video_widget_element_added_cb (FsElementAddedNotifier *notifier,
  GstBin *bin, GstElement *element, EmpathyVideoWidget *self)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (self);

  if (priv->overlay == NULL && GST_IS_X_OVERLAY (element))
    {
      priv->overlay = element;
      gst_x_overlay_expose (GST_X_OVERLAY (priv->overlay));
    }

  if (g_object_class_find_property (
      G_OBJECT_GET_CLASS (element), "force-aspect-ratio"))
    {
      g_object_set (G_OBJECT (element), "force-aspect-ratio", TRUE, NULL);
    }
}
Example no. 11
static gboolean
empathy_video_widget_expose_event (GtkWidget *widget, GdkEventExpose *event)
{
  EmpathyVideoWidget *self = EMPATHY_VIDEO_WIDGET (widget);
  EmpathyVideoWidgetPriv *priv = GET_PRIV (self);

  if (event != NULL && event->count > 0)
    return TRUE;

  if (priv->overlay == NULL)
    return TRUE;

  gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (priv->overlay),
    GDK_WINDOW_XID (widget->window));

  gst_x_overlay_expose (GST_X_OVERLAY (priv->overlay));

  return TRUE;
}
Example no. 12
G_MODULE_EXPORT gboolean
delayed_expose_cb(signal_user_data_t *ud)
{
    GstElement *vsink;
    GstXOverlay *xover;

    if (!ud->preview->live_enabled)
        return FALSE;

    g_object_get(ud->preview->play, "video-sink", &vsink, NULL);
    if (vsink == NULL)
        return FALSE;

    if (GST_IS_BIN(vsink))
        xover = GST_X_OVERLAY(gst_bin_get_by_interface(
                                GST_BIN(vsink), GST_TYPE_X_OVERLAY));
    else
        xover = GST_X_OVERLAY(vsink);
    gst_x_overlay_expose(xover);
    // This function is initiated by g_idle_add.  Must return false
    // so that it is not called again
    return FALSE;
}
Example no. 13
static gboolean
empathy_video_widget_expose_event (GtkWidget *widget, GdkEventExpose *event)
{
  EmpathyVideoWidget *self = EMPATHY_VIDEO_WIDGET (widget);
  EmpathyVideoWidgetPriv *priv = GET_PRIV (self);

  if (event != NULL && event->count > 0)
    return TRUE;

  if (priv->overlay == NULL)
    {
      gdk_window_clear_area (gtk_widget_get_window (widget), 0, 0,
        widget->allocation.width, widget->allocation.height);
      return TRUE;
    }

  gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (priv->overlay),
    GDK_WINDOW_XID (gtk_widget_get_window (widget)));

  gst_x_overlay_expose (GST_X_OVERLAY (priv->overlay));

  return TRUE;
}
Example no. 14
void QGstreamerVideoWidgetControl::windowExposed()
{
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
        gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
}
Example no. 15
void X11Renderer::windowExposed()
{
    QApplication::syncX();
    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink))
        gst_x_overlay_expose(GST_X_OVERLAY(m_videoSink));
}
Example no. 16
static gboolean
expose_cb (GtkWidget * widget, GdkEventExpose * event, GstElement * videosink)
{
  gst_x_overlay_expose (GST_X_OVERLAY (videosink));
  return FALSE;
}
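A handler this small is normally connected straight to the video window's GTK 2 "expose-event" signal, with the sink passed as user data. A minimal sketch, assuming a drawing-area widget and an existing videosink element:

/* Sketch only: video_window and videosink are placeholders. */
GtkWidget *video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (video_window, FALSE);
g_signal_connect (video_window, "expose-event",
    G_CALLBACK (expose_cb), videosink);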
Example no. 17
static VALUE
rg_expose(VALUE self)
{
    gst_x_overlay_expose(SELF(self));
    return self;
}
Example no. 18
int tsmf_window_resize(TSMFGstreamerDecoder *decoder, int x, int y, int width,
					   int height, int nr_rects, RDP_RECT *rects)
{
	if (decoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
		return -3;
	else
	{
#if GST_VERSION_MAJOR > 0
		GstVideoOverlay *overlay = GST_VIDEO_OVERLAY(decoder->outsink);
#else
		GstXOverlay *overlay = GST_X_OVERLAY(decoder->outsink);
#endif
		struct X11Handle *hdl = (struct X11Handle *)decoder->platform;
		DEBUG_TSMF("resize: x=%d, y=%d, w=%d, h=%d", x, y, width, height);
		assert(decoder);
		assert(hdl);
#if GST_VERSION_MAJOR > 0

		if (!gst_video_overlay_set_render_rectangle(overlay, 0, 0, width, height))
		{
			DEBUG_WARN("Could not resize overlay!");
		}

		gst_video_overlay_expose(overlay);
#else
		if (!gst_x_overlay_set_render_rectangle(overlay, 0, 0, width, height))
		{
			DEBUG_WARN("Could not resize overlay!");
		}

		gst_x_overlay_expose(overlay);
#endif

		if (hdl->subwin)
		{
			XMoveResizeWindow(hdl->disp, hdl->subwin, x, y, width, height);
#if defined(WITH_XEXT)

			if (hdl->has_shape)
			{
				int i;
				XRectangle *xrects = calloc(nr_rects, sizeof(XRectangle));

				for (i=0; i<nr_rects; i++)
				{
					xrects[i].x = rects[i].x - x;
					xrects[i].y = rects[i].y - y;
					xrects[i].width = rects[i].width;
					xrects[i].height = rects[i].height;
				}

				XShapeCombineRectangles(hdl->disp, hdl->subwin, ShapeBounding, x, y, xrects, nr_rects, ShapeSet, 0);
				free(xrects);
			}

#endif
			XSync(hdl->disp, FALSE);
		}

		return 0;
	}
}
Example no. 19
/* Update the window. */
void video_expose()
{
	gst_x_overlay_expose(GST_X_OVERLAY(xvsink));
}
Example no. 20
static inline void gst_video_overlay_expose(GstXOverlay *overlay) {
	gst_x_overlay_expose(overlay);
}
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
{
	gchar * sourceName;
	
	// source
	GstObject * source;
	source = GST_MESSAGE_SRC(msg);
	
	if (!GST_IS_OBJECT(source))
		return GST_BUS_DROP;
	
	sourceName = gst_object_get_name(source);

	switch (GST_MESSAGE_TYPE(msg)) 
	{
		case GST_MESSAGE_EOS: 
		{
			g_message("End-of-stream");
			end_eof = 1;
			break;
		}
		
		case GST_MESSAGE_ERROR: 
		{
			gchar * debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free (debug);
			lt_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					if ( g_strrstr(sourceName, "videosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
					else if ( g_strrstr(sourceName, "audiosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
				}
			}
			g_error_free(err);

			end_eof = 1; 		// NOTE: just to exit
			
			break;
		}
		
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;
	
			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
			}
			g_error_free(inf);
			break;
		}
		
		case GST_MESSAGE_TAG:
		{
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);
	
			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}
	
			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				if(fd >= 0)
				{
					int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
					close(fd);
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
				}
				//FIXME: how shall playback handle this event???
			}
			gst_tag_list_free(tags);
			lt_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__);  //FIXME: how shall playback handle this event???
			break;
		}
		
		case GST_MESSAGE_STATE_CHANGED:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			
			if(old_state == new_state)
				break;
			lt_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
		
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
		
			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					GstIterator *children;
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
					children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
					audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
					videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
					gst_iterator_free(children);
					
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
#if 0
		case GST_MESSAGE_ELEMENT:
		{
			if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id")) 
			{
				// set window id
				gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
				
				// reshape window
				gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
				
				// sync frames
				gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
			}
		}
#endif
		break;
		default:
			break;
	}

	g_free(sourceName);

	return GST_BUS_DROP;
}
Example no. 22
//redraw the current frame in the drawable
void Pipeline::doExpose() const
{
    if (m_pipeline && m_glimagesink)
        gst_x_overlay_expose (GST_X_OVERLAY (m_glimagesink));
}