Example #1
static void
gst_wl_window_resize_video_surface (GstWlWindow * window, gboolean commit)
{
  GstVideoRectangle src, res;

  /* center the video_subsurface inside area_subsurface */
  src.w = window->video_width;
  src.h = window->video_height;
  gst_video_sink_center_rect (src, window->render_rectangle, &res, TRUE);

  wl_subsurface_set_position (window->video_subsurface, res.x, res.y);
  wl_viewport_set_destination (window->video_viewport, res.w, res.h);

  if (commit) {
    wl_surface_damage (window->video_surface, 0, 0, res.w, res.h);
    wl_surface_commit (window->video_surface);
  }

  if (gst_wl_window_is_toplevel (window)) {
    struct wl_region *region;

    region = wl_compositor_create_region (window->display->compositor);
    wl_region_add (region, 0, 0, window->render_rectangle.w,
        window->render_rectangle.h);
    wl_surface_set_input_region (window->area_surface, region);
    wl_region_destroy (region);
  }

  /* this is saved for use in wl_surface_damage */
  window->surface_width = res.w;
  window->surface_height = res.h;
}
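
Every example in this collection leans on gst_video_sink_center_rect () to decide where the video should land inside the target area. The sketch below is a simplified reading of what that helper computes, not the upstream implementation from gst-plugins-base: with scaling TRUE it fits src into dst while preserving the aspect ratio, with scaling FALSE it only clips and centers.

/* Simplified sketch only; see gstvideosink.c in gst-plugins-base for the
 * real implementation. Assumes <gst/video/video.h> and GLib's MIN(). */
static void
center_rect_sketch (GstVideoRectangle src, GstVideoRectangle dst,
    GstVideoRectangle * result, gboolean scaling)
{
  if (!scaling) {
    /* no scaling: clip the source to the destination and center it */
    result->w = MIN (src.w, dst.w);
    result->h = MIN (src.h, dst.h);
    result->x = dst.x + (dst.w - result->w) / 2;
    result->y = dst.y + (dst.h - result->h) / 2;
  } else {
    gdouble src_ratio = (gdouble) src.w / src.h;
    gdouble dst_ratio = (gdouble) dst.w / dst.h;

    if (src_ratio > dst_ratio) {
      /* source is wider: fill the width, letterbox top and bottom */
      result->w = dst.w;
      result->h = dst.w / src_ratio;
      result->x = dst.x;
      result->y = dst.y + (dst.h - result->h) / 2;
    } else {
      /* source is taller (or equal): fill the height, pillarbox left/right */
      result->w = dst.h * src_ratio;
      result->h = dst.h;
      result->x = dst.x + (dst.w - result->w) / 2;
      result->y = dst.y;
    }
  }
}

For instance, a 1920x1080 source centered into a 1280x1024 destination with scaling TRUE comes out as roughly { x = 0, y = 152, w = 1280, h = 720 }, which is the kind of rectangle the subsurface/viewport calls above then consume.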
Example #2
static void
gst_sdlvideosink_navigation_send_event (GstNavigation * navigation,
                                        GstStructure * structure)
{
    GstSDLVideoSink *sdlvideosink = GST_SDLVIDEOSINK (navigation);
    GstEvent *event;
    GstVideoRectangle dst = { 0, };
    GstVideoRectangle src = { 0, };
    GstVideoRectangle result;
    double x, y, old_x, old_y;
    GstPad *pad = NULL;

    src.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
    src.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);
    dst.w = sdlvideosink->width;
    dst.h = sdlvideosink->height;
    gst_video_sink_center_rect (src, dst, &result, FALSE);

    event = gst_event_new_navigation (structure);

    /* Our coordinates can be wrong here if we centered the video */

    /* Converting pointer coordinates to the non-scaled geometry */
    if (gst_structure_get_double (structure, "pointer_x", &old_x)) {
        x = old_x;

        if (x >= result.x && x <= (result.x + result.w)) {
            x -= result.x;
            x *= sdlvideosink->width;
            x /= result.w;
        } else {
            x = 0;
        }
        GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event x "
                          "coordinate from %f to %f", old_x, x);
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL);
    }
    if (gst_structure_get_double (structure, "pointer_y", &old_y)) {
        y = old_y;

        if (y >= result.y && y <= (result.y + result.h)) {
            y -= result.y;
            y *= sdlvideosink->height;
            y /= result.h;
        } else {
            y = 0;
        }
        GST_DEBUG_OBJECT (sdlvideosink, "translated navigation event y "
                          "coordinate from %f to %f", old_y, y);
        gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL);
    }

    pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (sdlvideosink));

    if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) {
        gst_pad_send_event (pad, event);

        gst_object_unref (pad);
    }
}
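
A quick hand-worked pass through the translation above, with purely illustrative numbers (not taken from the source): assume a 640x480 video (GST_VIDEO_SINK_WIDTH/HEIGHT) shown unscaled in an 800x600 SDL surface.

/* Illustrative numbers only:
 *   src = { 0, 0, 640, 480 },  dst = { 0, 0, 800, 600 }
 *   gst_video_sink_center_rect (src, dst, &result, FALSE)
 *     -> result = { x = 80, y = 60, w = 640, h = 480 }
 * A navigation event with pointer_x = 400 lands inside the centered
 * region, so it is translated to
 *   (400 - 80) * 800 / 640 = 400
 * i.e. mapped back onto the full sdlvideosink->width range (800 here). */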
Example #3
static void
_fit_stream_to_allocated_size (GtkGstBaseWidget * base_widget,
    GtkAllocation * allocation, GstVideoRectangle * result)
{
  if (base_widget->force_aspect_ratio) {
    GstVideoRectangle src, dst;

    src.x = 0;
    src.y = 0;
    src.w = base_widget->display_width;
    src.h = base_widget->display_height;

    dst.x = 0;
    dst.y = 0;
    dst.w = allocation->width;
    dst.h = allocation->height;

    gst_video_sink_center_rect (src, dst, result, TRUE);
  } else {
    result->x = 0;
    result->y = 0;
    result->w = allocation->width;
    result->h = allocation->height;
  }
}
Example #4
EGLNativeWindowType
platform_create_native_window (gint width, gint height, gpointer * window_data)
{
  DISPMANX_ELEMENT_HANDLE_T dispman_element;
  DISPMANX_DISPLAY_HANDLE_T dispman_display;
  DISPMANX_UPDATE_HANDLE_T dispman_update;
  RPIWindowData *data;
  VC_RECT_T dst_rect;
  VC_RECT_T src_rect;
  GstVideoRectangle src, dst, res;

  uint32_t dp_height;
  uint32_t dp_width;

  int ret;

  ret = graphics_get_display_size (0, &dp_width, &dp_height);
  if (ret < 0) {
    GST_ERROR ("Can't open display");
    return (EGLNativeWindowType) 0;
  }
  GST_DEBUG ("Got display size: %dx%d\n", dp_width, dp_height);
  GST_DEBUG ("Source size: %dx%d\n", width, height);

  /* Center width*height frame inside dp_width*dp_height */
  src.w = width;
  src.h = height;
  src.x = src.y = 0;
  dst.w = dp_width;
  dst.h = dp_height;
  dst.x = dst.y = 0;
  gst_video_sink_center_rect (src, dst, &res, TRUE);

  dst_rect.x = res.x;
  dst_rect.y = res.y;
  dst_rect.width = res.w;
  dst_rect.height = res.h;

  src_rect.x = 0;
  src_rect.y = 0;
  src_rect.width = width << 16;
  src_rect.height = height << 16;

  dispman_display = vc_dispmanx_display_open (0);
  dispman_update = vc_dispmanx_update_start (0);
  dispman_element = vc_dispmanx_element_add (dispman_update,
      dispman_display, 0, &dst_rect, 0, &src_rect,
      DISPMANX_PROTECTION_NONE, 0, 0, 0);

  *window_data = data = g_slice_new0 (RPIWindowData);
  data->d = dispman_display;
  data->w.element = dispman_element;
  data->w.width = width;
  data->w.height = height;
  vc_dispmanx_update_submit_sync (dispman_update);

  return (EGLNativeWindowType) data;
}
Example #5
static void
window_resize (GstGLWindowDispmanxEGL * window_egl, guint width, guint height)
{
  GST_DEBUG ("resizing window from %ux%u to %ux%u",
      window_egl->native.width, window_egl->native.height, width, height);

  if (window_egl->display) {
    VC_RECT_T dst_rect;
    VC_RECT_T src_rect;
    GstVideoRectangle src, dst, res;
    DISPMANX_UPDATE_HANDLE_T dispman_update;
    VC_DISPMANX_ALPHA_T alpha =
        { DISPMANX_FLAGS_ALPHA_FIXED_ALL_PIXELS, 255, 0 };

    /* Center width*height frame inside dp_width*dp_height */
    src.w = width;
    src.h = height;
    src.x = src.y = 0;
    dst.w = window_egl->dp_width;
    dst.h = window_egl->dp_height;
    dst.x = dst.y = 0;
    gst_video_sink_center_rect (src, dst, &res, FALSE);

    dst_rect.x = res.x;
    dst_rect.y = res.y;
    dst_rect.width = res.w;
    dst_rect.height = res.h;

    src_rect.x = 0;
    src_rect.y = 0;
    src_rect.width = width << 16;
    src_rect.height = height << 16;

    dispman_update = vc_dispmanx_update_start (0);

    if (window_egl->native.element) {
      vc_dispmanx_element_change_attributes (dispman_update,
          window_egl->native.element, 0x00000110, 0, 0, &dst_rect, &src_rect, 0,
          0);
    } else {
      window_egl->native.element = vc_dispmanx_element_add (dispman_update,
          window_egl->display, 0, &dst_rect, 0, &src_rect,
          DISPMANX_PROTECTION_NONE, &alpha, 0, 0);
    }

    vc_dispmanx_update_submit_sync (dispman_update);

    if (GST_GL_WINDOW (window_egl)->resize)
      GST_GL_WINDOW (window_egl)->
          resize (GST_GL_WINDOW (window_egl)->resize_data, width, height);
  }

  window_egl->native.width = width;
  window_egl->native.height = height;
}
Example #6
static void
gst_glimage_sink_on_resize (const GstGLImageSink * gl_sink, gint width,
    gint height)
{
  /* Here gl_sink members (e.g. gl_sink->info) have a lifetime of set_caps.
   * It means that they cannot change between two set_caps calls.
   */
  const GstGLFuncs *gl = gl_sink->context->gl_vtable;

  GST_TRACE ("GL Window resized to %ux%u", width, height);

  /* check if a client reshape callback is registered */
  if (gl_sink->clientReshapeCallback)
    gl_sink->clientReshapeCallback (width, height, gl_sink->client_data);

  /* default reshape */
  else {
    if (gl_sink->keep_aspect_ratio) {
      GstVideoRectangle src, dst, result;

      src.x = 0;
      src.y = 0;
      src.w = GST_VIDEO_INFO_WIDTH (&gl_sink->info);
      src.h = GST_VIDEO_INFO_HEIGHT (&gl_sink->info);

      dst.x = 0;
      dst.y = 0;
      dst.w = width;
      dst.h = height;

      gst_video_sink_center_rect (src, dst, &result, TRUE);
      gl->Viewport (result.x, result.y, result.w, result.h);
    } else {
      gl->Viewport (0, 0, width, height);
    }
#if GST_GL_HAVE_OPENGL
    if (USING_OPENGL (gl_sink->context)) {
      gl->MatrixMode (GL_PROJECTION);
      gl->LoadIdentity ();
      gluOrtho2D (0, width, 0, height);
      gl->MatrixMode (GL_MODELVIEW);
    }
#endif
  }
}
Example #7
static void
gst_wl_window_resize_internal (GstWlWindow * window, gboolean commit)
{
  GstVideoRectangle src, res;

  src.w = window->video_width;
  src.h = window->video_height;
  gst_video_sink_center_rect (src, window->render_rectangle, &res, TRUE);

  if (window->subsurface)
    wl_subsurface_set_position (window->subsurface,
        window->render_rectangle.x + res.x, window->render_rectangle.y + res.y);
  wl_viewport_set_destination (window->viewport, res.w, res.h);

  if (commit) {
    wl_surface_damage (window->surface, 0, 0, res.w, res.h);
    wl_surface_commit (window->surface);
  }

  /* this is saved for use in wl_surface_damage */
  window->surface_width = res.w;
  window->surface_height = res.h;
}
Example #8
static void
window_resize (GstGLWindowDispmanxEGL * window_egl, guint width, guint height,
    gboolean visible)
{
  GstGLWindow *window = GST_GL_WINDOW (window_egl);

  GST_DEBUG ("resizing %s window from %ux%u to %ux%u",
      visible ? "visible" : "invisible", window_egl->native.width,
      window_egl->native.height, width, height);

  if (window_egl->display) {
    VC_RECT_T dst_rect;
    VC_RECT_T src_rect;
    GstVideoRectangle src, res;
    DISPMANX_UPDATE_HANDLE_T dispman_update;
    uint32_t opacity = visible ? 255 : 0;
    VC_DISPMANX_ALPHA_T alpha =
        { DISPMANX_FLAGS_ALPHA_FIXED_ALL_PIXELS, opacity, 0 };

    src.w = width;
    src.h = height;
    src.x = src.y = 0;

    /* If there is no render rectangle, center the width*height frame
     *  inside dp_width*dp_height */
    if (window_egl->render_rect.w <= 0 || window_egl->render_rect.h <= 0) {
      GstVideoRectangle dst;
      dst.w = window_egl->dp_width;
      dst.h = window_egl->dp_height;
      dst.x = dst.y = 0;
      gst_video_sink_center_rect (src, dst, &res, FALSE);
    } else {
      gst_video_sink_center_rect (src, window_egl->render_rect, &res, FALSE);
    }

    dst_rect.x = res.x;
    dst_rect.y = res.y;
    dst_rect.width = res.w;
    dst_rect.height = res.h;

    src_rect.x = 0;
    src_rect.y = 0;
    src_rect.width = width << 16;
    src_rect.height = height << 16;

    dispman_update = vc_dispmanx_update_start (0);

    if (window_egl->native.element) {
      uint32_t change_flags =
          ELEMENT_CHANGE_OPACITY | ELEMENT_CHANGE_DEST_RECT |
          ELEMENT_CHANGE_SRC_RECT;
      vc_dispmanx_element_change_attributes (dispman_update,
          window_egl->native.element, change_flags, 0, opacity, &dst_rect,
          &src_rect, 0, 0);
    } else {
      window_egl->native.element = vc_dispmanx_element_add (dispman_update,
          window_egl->display, 0, &dst_rect, 0, &src_rect,
          DISPMANX_PROTECTION_NONE, &alpha, 0, 0);
    }

    vc_dispmanx_update_submit_sync (dispman_update);

    gst_gl_window_resize (window, width, height);
  }

  window_egl->native.width = width;
  window_egl->native.height = height;
}
Example #9
static gboolean
gtk_gst_widget_draw (GtkWidget * widget, cairo_t * cr)
{
  GtkGstWidget *gst_widget = (GtkGstWidget *) widget;
  guint widget_width, widget_height;
  cairo_surface_t *surface;
  GstVideoFrame frame;

  widget_width = gtk_widget_get_allocated_width (widget);
  widget_height = gtk_widget_get_allocated_height (widget);

  g_mutex_lock (&gst_widget->priv->lock);

  /* draw the video frame if we are negotiated, have a buffer and can map it */
  if (gst_widget->priv->negotiated && gst_widget->priv->buffer
      && gst_video_frame_map (&frame, &gst_widget->priv->v_info,
          gst_widget->priv->buffer, GST_MAP_READ)) {
    gdouble scale_x = (gdouble) widget_width / gst_widget->priv->display_width;
    gdouble scale_y =
        (gdouble) widget_height / gst_widget->priv->display_height;
    GstVideoRectangle result;
    cairo_format_t format;

    gst_widget->priv->v_info = frame.info;
    if (frame.info.finfo->format == GST_VIDEO_FORMAT_ARGB ||
        frame.info.finfo->format == GST_VIDEO_FORMAT_BGRA) {
      format = CAIRO_FORMAT_ARGB32;
    } else {
      format = CAIRO_FORMAT_RGB24;
    }

    surface = cairo_image_surface_create_for_data (frame.data[0],
        format, frame.info.width, frame.info.height, frame.info.stride[0]);

    if (gst_widget->priv->force_aspect_ratio) {
      GstVideoRectangle src, dst;

      src.x = 0;
      src.y = 0;
      src.w = gst_widget->priv->display_width;
      src.h = gst_widget->priv->display_height;

      dst.x = 0;
      dst.y = 0;
      dst.w = widget_width;
      dst.h = widget_height;

      gst_video_sink_center_rect (src, dst, &result, TRUE);

      scale_x = scale_y = MIN (scale_x, scale_y);
    } else {
      result.x = 0;
      result.y = 0;
      result.w = widget_width;
      result.h = widget_height;
    }

    if (gst_widget->priv->ignore_alpha) {
      GdkRGBA color = { 0.0, 0.0, 0.0, 1.0 };

      gdk_cairo_set_source_rgba (cr, &color);
      if (result.x > 0) {
        cairo_rectangle (cr, 0, 0, result.x, widget_height);
        cairo_fill (cr);
      }
      if (result.y > 0) {
        cairo_rectangle (cr, 0, 0, widget_width, result.y);
        cairo_fill (cr);
      }
      if (result.w < widget_width) {
        cairo_rectangle (cr, result.x + result.w, 0, widget_width - result.w,
            widget_height);
        cairo_fill (cr);
      }
      if (result.h < widget_height) {
        cairo_rectangle (cr, 0, result.y + result.h, widget_width,
            widget_height - result.h);
        cairo_fill (cr);
      }
    }

    scale_x *=
        (gdouble) gst_widget->priv->display_width / (gdouble) frame.info.width;
    scale_y *=
        (gdouble) gst_widget->priv->display_height /
        (gdouble) frame.info.height;

    cairo_translate (cr, result.x, result.y);
    cairo_scale (cr, scale_x, scale_y);
    cairo_rectangle (cr, 0, 0, result.w, result.h);
    cairo_set_source_surface (cr, surface, 0, 0);
    cairo_paint (cr);

    cairo_surface_destroy (surface);

    gst_video_frame_unmap (&frame);
  } else {
    GdkRGBA color;

    if (gst_widget->priv->ignore_alpha) {
      color.red = color.blue = color.green = 0.0;
      color.alpha = 1.0;
    } else {
      gtk_style_context_get_color (gtk_widget_get_style_context (widget),
          GTK_STATE_FLAG_NORMAL, &color);
    }
    gdk_cairo_set_source_rgba (cr, &color);
    cairo_rectangle (cr, 0, 0, widget_width, widget_height);
    cairo_fill (cr);
  }

  g_mutex_unlock (&gst_widget->priv->lock);
  return FALSE;
}
Example #10
static GstFlowReturn
gst_mir_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
    GstMirSink *sink = GST_MIR_SINK (bsink);
    //GstVideoRectangle src, dst, res;
    GstBuffer *to_render;
    GstMirMeta *meta;
    //GstFlowReturn ret;

    GST_DEBUG_OBJECT (sink, "render buffer %p", buffer);

    meta = gst_buffer_get_mir_meta (buffer);

    if (meta && meta->sink == sink) {
        GST_LOG_OBJECT (sink, "buffer %p from our pool, writing directly", buffer);
        to_render = buffer;
    } else {
        //GstMapInfo src;
        GST_LOG_OBJECT (sink, "buffer %p not from our pool, copying", buffer);
        to_render = buffer;

#if 0
        if (!sink->pool)
            goto no_pool;

        if (!gst_buffer_pool_set_active (sink->pool, TRUE))
            goto activate_failed;

        ret = gst_buffer_pool_acquire_buffer (sink->pool, &to_render, NULL);
        if (ret != GST_FLOW_OK)
            goto no_buffer;

        gst_buffer_map (buffer, &src, GST_MAP_READ);
        gst_buffer_fill (to_render, 0, src.data, src.size);
        gst_buffer_unmap (buffer, &src);

        meta = gst_buffer_get_mir_meta (to_render);
#endif
    }

    g_signal_emit (G_OBJECT (bsink), frame_ready_signal, 0);

#if 0
    src.w = sink->video_width;
    src.h = sink->video_height;
    dst.w = sink->window->width;
    dst.h = sink->window->height;

    gst_video_sink_center_rect (src, dst, &res, FALSE);
#endif

    if (buffer != to_render)
        gst_buffer_unref (to_render);
    return GST_FLOW_OK;

#if 0
no_buffer:
    {
        GST_WARNING_OBJECT (sink, "could not create image");
        return ret;
    }
no_pool:
    {
        GST_ELEMENT_ERROR (sink, RESOURCE, WRITE,
                           ("Internal error: can't allocate images"),
                           ("We don't have a bufferpool negotiated"));
        return GST_FLOW_ERROR;
    }
activate_failed:
    {
        GST_ERROR_OBJECT (sink, "failed to activate bufferpool.");
        ret = GST_FLOW_ERROR;
        return ret;
    }
#endif
}
Example #11
static GstFlowReturn
gst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp)
{
  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  GstFlowReturn ret;

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GError *err;
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }, dest_r = { 0, };
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    err = NULL;
    ret =
        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *) vpp->srcpad,
        &outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    src_r.w = vpp->width;
    src_r.h = vpp->height;
    if (vpp->got_par) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK)
      goto render_error;

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    err = NULL;
    ret =
        gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,
        outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid output caps"), (NULL));
    ret = GST_FLOW_ERROR;
    break;

  render_error:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not postprocess frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    ret = GST_FLOW_ERROR;
    break;

  output_pad_error:
    if (ret == GST_FLOW_ERROR && err != NULL)
      gst_vdp_vpp_post_error (vpp, err);
    break;
  }

  return ret;
}
Example #12
static GstFlowReturn
gst_eglglessink_render (GstEglGlesSink * eglglessink)
{
  guint dar_n, dar_d;
  gint i;
  gint w, h;

  w = GST_VIDEO_SINK_WIDTH (eglglessink);
  h = GST_VIDEO_SINK_HEIGHT (eglglessink);

  /* If no one has set a display rectangle on us, initialize
   * a sane default. According to the docs on the xOverlay
   * interface we are supposed to fill the overlay 100%. We
   * do this trying to take PAR/DAR into account unless the
   * calling party explicitly asks us not to by setting
   * force_aspect_ratio to FALSE.
   */
  if (gst_egl_adaptation_update_surface_dimensions
      (eglglessink->egl_context) || eglglessink->render_region_changed
      || !eglglessink->display_region.w || !eglglessink->display_region.h
      || eglglessink->size_changed) {
    GST_OBJECT_LOCK (eglglessink);

    if (!eglglessink->render_region_user) {
      eglglessink->render_region.x = 0;
      eglglessink->render_region.y = 0;
      eglglessink->render_region.w = eglglessink->egl_context->surface_width;
      eglglessink->render_region.h = eglglessink->egl_context->surface_height;
    }
    eglglessink->render_region_changed = FALSE;
    eglglessink->size_changed = FALSE;

    if (!eglglessink->force_aspect_ratio) {
      eglglessink->display_region.x = 0;
      eglglessink->display_region.y = 0;
      eglglessink->display_region.w = eglglessink->render_region.w;
      eglglessink->display_region.h = eglglessink->render_region.h;
    } else {
      GstVideoRectangle frame;

      frame.x = 0;
      frame.y = 0;

      if (!gst_video_calculate_display_ratio (&dar_n, &dar_d,
              w, h,
              eglglessink->par_n,
              eglglessink->par_d,
              eglglessink->egl_context->pixel_aspect_ratio_n,
              eglglessink->egl_context->pixel_aspect_ratio_d)) {
        GST_WARNING_OBJECT (eglglessink, "Could not compute resulting DAR");
        frame.w = w;
        frame.h = h;
      } else {
        /* Find a suitable matching new size according to DAR & PAR.
         * The rationale for preferring to leave the height untouched
         * comes from interlacing considerations.
         * XXX: Move this to gstutils?
         */
        if (h % dar_d == 0) {
          frame.w = gst_util_uint64_scale_int (h, dar_n, dar_d);
          frame.h = h;
        } else if (w % dar_n == 0) {
          frame.h = gst_util_uint64_scale_int (w, dar_d, dar_n);
          frame.w = w;
        } else {
          /* Neither width nor height can be precisely scaled.
           * Prefer to leave height untouched. See comment above.
           */
          frame.w = gst_util_uint64_scale_int (h, dar_n, dar_d);
          frame.h = h;
        }
      }

      gst_video_sink_center_rect (frame, eglglessink->render_region,
          &eglglessink->display_region, TRUE);
    }

    glViewport (eglglessink->render_region.x,
        eglglessink->egl_context->surface_height -
        eglglessink->render_region.y -
        eglglessink->render_region.h,
        eglglessink->render_region.w, eglglessink->render_region.h);

    /* Clear the surface once if its content is preserved */
    if (eglglessink->egl_context->buffer_preserved) {
      glClearColor (0.0, 0.0, 0.0, 1.0);
      glClear (GL_COLOR_BUFFER_BIT);
    }

    if (!gst_eglglessink_setup_vbo (eglglessink, FALSE)) {
      GST_OBJECT_UNLOCK (eglglessink);
      GST_ERROR_OBJECT (eglglessink, "VBO setup failed");
      goto HANDLE_ERROR;
    }
    GST_OBJECT_UNLOCK (eglglessink);
  }

  if (!eglglessink->egl_context->buffer_preserved) {
    /* Draw black borders */
    GST_DEBUG_OBJECT (eglglessink, "Drawing black border 1");
    glUseProgram (eglglessink->egl_context->glslprogram[1]);

    glVertexAttribPointer (eglglessink->egl_context->position_loc[1], 3,
        GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (4 * sizeof (coord5)));
    if (got_gl_error ("glVertexAttribPointer"))
      goto HANDLE_ERROR;

    glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
    if (got_gl_error ("glDrawElements"))
      goto HANDLE_ERROR;

    GST_DEBUG_OBJECT (eglglessink, "Drawing black border 2");

    glVertexAttribPointer (eglglessink->egl_context->position_loc[1], 3,
        GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (8 * sizeof (coord5)));
    if (got_gl_error ("glVertexAttribPointer"))
      goto HANDLE_ERROR;

    glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
    if (got_gl_error ("glDrawElements"))
      goto HANDLE_ERROR;
  }

  /* Draw video frame */
  GST_DEBUG_OBJECT (eglglessink, "Drawing video frame");
  glUseProgram (eglglessink->egl_context->glslprogram[0]);

  for (i = 0; i < eglglessink->egl_context->n_textures; i++) {
    glUniform1i (eglglessink->egl_context->tex_loc[0][i], i);
    if (got_gl_error ("glUniform1i"))
      goto HANDLE_ERROR;
  }

  glVertexAttribPointer (eglglessink->egl_context->position_loc[0], 3,
      GL_FLOAT, GL_FALSE, sizeof (coord5), (gpointer) (0 * sizeof (coord5)));
  if (got_gl_error ("glVertexAttribPointer"))
    goto HANDLE_ERROR;

  glVertexAttribPointer (eglglessink->egl_context->texpos_loc[0], 2, GL_FLOAT,
      GL_FALSE, sizeof (coord5), (gpointer) (3 * sizeof (gfloat)));
  if (got_gl_error ("glVertexAttribPointer"))
    goto HANDLE_ERROR;

  glDrawElements (GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
  if (got_gl_error ("glDrawElements"))
    goto HANDLE_ERROR;

  if (!gst_egl_adaptation_swap_buffers (eglglessink->egl_context)) {
    goto HANDLE_ERROR;
  }

  GST_DEBUG_OBJECT (eglglessink, "Succesfully rendered 1 frame");
  return GST_FLOW_OK;

HANDLE_ERROR:
  GST_ERROR_OBJECT (eglglessink, "Rendering disabled for this frame");

  return GST_FLOW_ERROR;
}
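
To make the aspect-ratio branch in the middle of this example concrete, here is a hand-worked case with illustrative numbers that are not from the source:

/* Illustrative only: a 720x576 (PAL) frame with a 16:15 pixel aspect
 * ratio displayed on square pixels.
 * gst_video_calculate_display_ratio () reduces
 *   (720 * 16 * 1) : (576 * 15 * 1)  ->  dar_n : dar_d = 4 : 3
 * Since h % dar_d == 0 (576 % 3 == 0) the height is left untouched:
 *   frame.w = 576 * 4 / 3 = 768,  frame.h = 576
 * gst_video_sink_center_rect () then letterboxes this 768x576 frame
 * inside the render region. */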
Example #13
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }, dest_r = { 0, };
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret =
        gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);

  return ret;
}
Example #14
static GstFlowReturn
gst_kms_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  gint ret;
  GstBuffer *buffer;
  guint32 fb_id;
  GstKMSSink *self;
  GstVideoCropMeta *crop;
  GstVideoRectangle src = { 0, };
  GstVideoRectangle dst = { 0, };
  GstVideoRectangle result;
  GstFlowReturn res;

  self = GST_KMS_SINK (vsink);

  res = GST_FLOW_ERROR;

  buffer = gst_kms_sink_get_input_buffer (self, buf);
  if (!buffer)
    return GST_FLOW_ERROR;
  fb_id = gst_kms_memory_get_fb_id (gst_buffer_peek_memory (buffer, 0));
  if (fb_id == 0)
    goto buffer_invalid;

  GST_TRACE_OBJECT (self, "displaying fb %d", fb_id);

  {
    if ((crop = gst_buffer_get_video_crop_meta (buffer))) {
      src.x = crop->x;
      src.y = crop->y;
      src.w = crop->width;
      src.h = crop->height;
    } else {
      src.w = GST_VIDEO_SINK_WIDTH (self);
      src.h = GST_VIDEO_SINK_HEIGHT (self);
    }
  }

  dst.w = self->hdisplay;
  dst.h = self->vdisplay;

  gst_video_sink_center_rect (src, dst, &result, FALSE);

  /* if the frame size is bigger than the display size, clamp the
   * source size to the display size */
  src.w = MIN (src.w, self->hdisplay);
  src.h = MIN (src.h, self->vdisplay);

  ret = drmModeSetPlane (self->fd, self->plane_id, self->crtc_id, fb_id, 0,
      result.x, result.y, result.w, result.h,
      /* source/cropping coordinates are given in Q16 */
      src.x << 16, src.y << 16, src.w << 16, src.h << 16);
  if (ret)
    goto set_plane_failed;

  /* Wait for the previous frame to complete redraw */
  if (!gst_kms_sink_sync (self))
    goto bail;

  gst_buffer_replace (&self->last_buffer, buffer);

  res = GST_FLOW_OK;

bail:
  gst_buffer_unref (buffer);
  return res;

  /* ERRORS */
buffer_invalid:
  {
    GST_ERROR_OBJECT (self, "invalid buffer: it doesn't have a fb id");
    goto bail;
  }
set_plane_failed:
  {
    GST_DEBUG_OBJECT (self, "result = { %d, %d, %d, %d} / "
        "src = { %d, %d, %d %d } / dst = { %d, %d, %d %d }", result.x, result.y,
        result.w, result.h, src.x, src.y, src.w, src.h, dst.x, dst.y, dst.w,
        dst.h);
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (NULL), ("drmModeSetPlane failed: %s (%d)", strerror (-ret), ret));
    goto bail;
  }
}
Example #15
static GstFlowReturn
gst_kms_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  gint ret;
  GstBuffer *buffer;
  guint32 fb_id;
  GstKMSSink *self;
  GstVideoCropMeta *crop;
  GstVideoRectangle src = { 0, };
  GstVideoRectangle dst = { 0, };
  GstVideoRectangle result;
  GstFlowReturn res;

  self = GST_KMS_SINK (vsink);

  res = GST_FLOW_ERROR;

  buffer = gst_kms_sink_get_input_buffer (self, buf);
  if (!buffer)
    return GST_FLOW_ERROR;
  fb_id = gst_kms_memory_get_fb_id (gst_buffer_peek_memory (buffer, 0));
  if (fb_id == 0)
    goto buffer_invalid;

  GST_TRACE_OBJECT (self, "displaying fb %d", fb_id);

  if (self->modesetting_enabled) {
    self->buffer_id = fb_id;
    goto sync_frame;
  }

  if ((crop = gst_buffer_get_video_crop_meta (buffer))) {
    GstVideoInfo vinfo = self->vinfo;
    vinfo.width = crop->width;
    vinfo.height = crop->height;

    if (!gst_kms_sink_calculate_display_ratio (self, &vinfo))
      goto no_disp_ratio;

    src.x = crop->x;
    src.y = crop->y;
  }

  src.w = GST_VIDEO_SINK_WIDTH (self);
  src.h = GST_VIDEO_SINK_HEIGHT (self);

  dst.w = self->hdisplay;
  dst.h = self->vdisplay;

  gst_video_sink_center_rect (src, dst, &result, TRUE);

  if (crop) {
    src.w = crop->width;
    src.h = crop->height;
  } else {
    src.w = GST_VIDEO_INFO_WIDTH (&self->vinfo);
    src.h = GST_VIDEO_INFO_HEIGHT (&self->vinfo);
  }

  GST_TRACE_OBJECT (self,
      "drmModeSetPlane at (%i,%i) %ix%i sourcing at (%i,%i) %ix%i",
      result.x, result.y, result.w, result.h, src.x, src.y, src.w, src.h);

  ret = drmModeSetPlane (self->fd, self->plane_id, self->crtc_id, fb_id, 0,
      result.x, result.y, result.w, result.h,
      /* source/cropping coordinates are given in Q16 */
      src.x << 16, src.y << 16, src.w << 16, src.h << 16);
  if (ret)
    goto set_plane_failed;

sync_frame:
  /* Wait for the previous frame to complete redraw */
  if (!gst_kms_sink_sync (self))
    goto bail;

  gst_buffer_replace (&self->last_buffer, buffer);
  g_clear_pointer (&self->tmp_kmsmem, gst_memory_unref);

  res = GST_FLOW_OK;

bail:
  gst_buffer_unref (buffer);
  return res;

  /* ERRORS */
buffer_invalid:
  {
    GST_ERROR_OBJECT (self, "invalid buffer: it doesn't have a fb id");
    goto bail;
  }
set_plane_failed:
  {
    GST_DEBUG_OBJECT (self, "result = { %d, %d, %d, %d} / "
        "src = { %d, %d, %d %d } / dst = { %d, %d, %d %d }", result.x, result.y,
        result.w, result.h, src.x, src.y, src.w, src.h, dst.x, dst.y, dst.w,
        dst.h);
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (NULL), ("drmModeSetPlane failed: %s (%d)", strerror (-ret), ret));
    goto bail;
  }
no_disp_ratio:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
        ("Error calculating the output display ratio of the video."));
    goto bail;
  }
}
Example #16
static void
gst_pvrvideosink_blit (GstPVRVideoSink * pvrvideosink, GstBuffer * buffer)
{
  PVR2DERROR pvr_error;
  GstDrawContext *dcontext = pvrvideosink->dcontext;
  gint video_width;
  gint video_height;
  gboolean draw_border = FALSE;
  PPVR2D_3DBLT_EXT p_blt_3d;
  PVR2DMEMINFO *src_mem;
  PVR2DFORMAT pvr_format;
  GstVideoRectangle result;
  GstPVRMeta *meta;
  GstVideoCropMeta *cropmeta;

  GST_DEBUG_OBJECT (pvrvideosink, "buffer %p", buffer);

  pvr_format =
      GST_VIDEO_INFO_FORMAT (&pvrvideosink->info) ==
      GST_VIDEO_FORMAT_NV12 ? PVR2D_YUV420_2PLANE : PVR2D_ARGB8888;

  g_mutex_lock (pvrvideosink->flow_lock);
  if (buffer == NULL)
    buffer = pvrvideosink->current_buffer;

  if (buffer == NULL)
    goto done;

  meta = gst_buffer_get_pvr_meta (buffer);
  if (G_UNLIKELY (meta == NULL))
    goto no_pvr_meta;

  src_mem = meta->src_mem;
  p_blt_3d = dcontext->p_blt_info;

  video_width = GST_VIDEO_SINK_WIDTH (pvrvideosink);
  video_height = GST_VIDEO_SINK_HEIGHT (pvrvideosink);

  g_mutex_lock (pvrvideosink->dcontext->x_lock);

  /* Draw borders when displaying the first frame. After this
     draw borders only on expose event or after a size change. */
  if (!(pvrvideosink->current_buffer) || pvrvideosink->redraw_borders) {
    draw_border = TRUE;
  }

  /* Store a reference to the last image we put, lose the previous one */
  if (buffer && pvrvideosink->current_buffer != buffer) {
    if (pvrvideosink->current_buffer) {
      GST_LOG_OBJECT (pvrvideosink, "unreffing %p",
          pvrvideosink->current_buffer);
      gst_buffer_unref (GST_BUFFER_CAST (pvrvideosink->current_buffer));
    }
    GST_LOG_OBJECT (pvrvideosink, "reffing %p as our current buffer", buffer);
    pvrvideosink->current_buffer = gst_buffer_ref (buffer);
  }

  if (pvrvideosink->keep_aspect) {
    GstVideoRectangle src, dst;

    src.w = GST_VIDEO_SINK_WIDTH (pvrvideosink);
    src.h = GST_VIDEO_SINK_HEIGHT (pvrvideosink);
    dst.w = pvrvideosink->render_rect.w;
    dst.h = pvrvideosink->render_rect.h;
    gst_video_sink_center_rect (src, dst, &result, TRUE);
    result.x += pvrvideosink->render_rect.x;
    result.y += pvrvideosink->render_rect.y;
  } else {
    memcpy (&result, &pvrvideosink->render_rect, sizeof (GstVideoRectangle));
  }

  p_blt_3d->sDst.pSurfMemInfo = &dcontext->dst_mem;
  p_blt_3d->sDst.SurfOffset = 0;
  p_blt_3d->sDst.Stride = 4 * pvrvideosink->render_params.ui32Stride;
  p_blt_3d->sDst.Format = PVR2D_ARGB8888;
  p_blt_3d->sDst.SurfWidth = pvrvideosink->xwindow->width;
  p_blt_3d->sDst.SurfHeight = pvrvideosink->xwindow->height;

  p_blt_3d->rcDest.left = result.x;
  p_blt_3d->rcDest.top = result.y;
  p_blt_3d->rcDest.right = result.w + result.x;
  p_blt_3d->rcDest.bottom = result.h + result.y;

  p_blt_3d->sSrc.pSurfMemInfo = src_mem;
  p_blt_3d->sSrc.SurfOffset = 0;
  p_blt_3d->sSrc.Stride = GST_VIDEO_INFO_COMP_STRIDE (&pvrvideosink->info, 0);
  p_blt_3d->sSrc.Format = pvr_format;
  p_blt_3d->sSrc.SurfWidth = video_width;
  p_blt_3d->sSrc.SurfHeight = video_height;

  /* If buffer has crop information, use that */
  if ((cropmeta = gst_buffer_get_video_crop_meta (buffer))) {
    p_blt_3d->rcSource.left = cropmeta->x;
    p_blt_3d->rcSource.top = cropmeta->y;
    p_blt_3d->rcSource.right = cropmeta->x + cropmeta->width;
    p_blt_3d->rcSource.bottom = cropmeta->y + cropmeta->height;
  } else {
    p_blt_3d->rcSource.left = 0;
    p_blt_3d->rcSource.top = 0;
    p_blt_3d->rcSource.right = video_width;
    p_blt_3d->rcSource.bottom = video_height;
  }

  p_blt_3d->hUseCode = NULL;

  if (GST_VIDEO_INFO_FORMAT (&pvrvideosink->info) == GST_VIDEO_FORMAT_NV12)
    p_blt_3d->bDisableDestInput = TRUE;
  else
    /* blit fails for RGB without this... not sure why yet... */
    p_blt_3d->bDisableDestInput = FALSE;

  GST_DEBUG_OBJECT (pvrvideosink, "about to blit");

  pvr_error = PVR2DBlt3DExt (pvrvideosink->dcontext->pvr_context,
      dcontext->p_blt_info);

  if (pvr_error != PVR2D_OK) {
    GST_ERROR_OBJECT (pvrvideosink, "Failed to blit. Error : %s",
        gst_pvr2d_error_get_string (pvr_error));
    goto done;
  }
  dcontext->wsegl_table->pfnWSEGL_SwapDrawable (dcontext->drawable_handle, 1);

  if (draw_border) {
    gst_pvrvideosink_xwindow_draw_borders (pvrvideosink, pvrvideosink->xwindow,
        result);
    pvrvideosink->redraw_borders = FALSE;
  }
  g_mutex_unlock (pvrvideosink->dcontext->x_lock);

done:
  GST_DEBUG_OBJECT (pvrvideosink, "end");
  g_mutex_unlock (pvrvideosink->flow_lock);
  return;

  /* Error cases */

no_pvr_meta:
  {
    g_mutex_unlock (pvrvideosink->flow_lock);
    GST_ERROR_OBJECT (pvrvideosink, "Got a buffer without GstPVRMeta");
    return;
  }
}
Example #17
static GstFlowReturn
gst_wayland_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstWaylandSink *sink = GST_WAYLAND_SINK (bsink);
  GstVideoRectangle src, dst, res;
  GstBuffer *to_render;
  GstWlMeta *meta;
  GstFlowReturn ret;
  struct window *window;
  struct display *display;

  GST_LOG_OBJECT (sink, "render buffer %p", buffer);
  if (!sink->window)
    create_window (sink, sink->display, sink->video_width, sink->video_height);

  window = sink->window;
  display = sink->display;

  meta = gst_buffer_get_wl_meta (buffer);

  if (window->redraw_pending) {
    wl_display_dispatch (display->display);
  }

  if (meta && meta->sink == sink) {
    GST_LOG_OBJECT (sink, "buffer %p from our pool, writing directly", buffer);
    to_render = buffer;
  } else {
    GstMapInfo src;
    GST_LOG_OBJECT (sink, "buffer %p not from our pool, copying", buffer);

    if (!sink->pool)
      goto no_pool;

    if (!gst_buffer_pool_set_active (sink->pool, TRUE))
      goto activate_failed;

    ret = gst_buffer_pool_acquire_buffer (sink->pool, &to_render, NULL);
    if (ret != GST_FLOW_OK)
      goto no_buffer;

    gst_buffer_map (buffer, &src, GST_MAP_READ);
    gst_buffer_fill (to_render, 0, src.data, src.size);
    gst_buffer_unmap (buffer, &src);

    meta = gst_buffer_get_wl_meta (to_render);
  }

  src.w = sink->video_width;
  src.h = sink->video_height;
  dst.w = sink->window->width;
  dst.h = sink->window->height;

  gst_video_sink_center_rect (src, dst, &res, FALSE);

  wl_surface_attach (sink->window->surface, meta->wbuffer, 0, 0);
  wl_surface_damage (sink->window->surface, 0, 0, res.w, res.h);
  window->redraw_pending = TRUE;
  window->callback = wl_surface_frame (window->surface);
  wl_callback_add_listener (window->callback, &frame_callback_listener, window);
  wl_surface_commit (window->surface);
  wl_display_dispatch (display->display);

  if (buffer != to_render)
    gst_buffer_unref (to_render);
  return GST_FLOW_OK;

no_buffer:
  {
    GST_WARNING_OBJECT (sink, "could not create image");
    return ret;
  }
no_pool:
  {
    GST_ELEMENT_ERROR (sink, RESOURCE, WRITE,
        ("Internal error: can't allocate images"),
        ("We don't have a bufferpool negotiated"));
    return GST_FLOW_ERROR;
  }
activate_failed:
  {
    GST_ERROR_OBJECT (sink, "failed to activate bufferpool.");
    ret = GST_FLOW_ERROR;
    return ret;
  }
}
Example #18
static GstFlowReturn
gst_wayland_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstWaylandSink *sink = GST_WAYLAND_SINK (bsink);
  gboolean mem_cpy = TRUE;
  GstVideoRectangle src, dst, res;

  GST_LOG_OBJECT (sink,
      "render buffer %p, data = %p, timestamp = %" GST_TIME_FORMAT, buffer,
      GST_BUFFER_DATA (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  if (!sink->window)
    create_window (sink, sink->display, sink->video_width, sink->video_height);

  if (sink->render_finish) {
    if (GST_IS_WLBUFFER (buffer)) {
      GstWlBuffer *tmp_buffer = (GstWlBuffer *) buffer;

      /* Does it have a waylandbuffer ? */
      if (tmp_buffer->wbuffer) {
        mem_cpy = FALSE;
        GST_DEBUG_OBJECT (sink, "we have a buffer (%p) we allocated "
            "ourselves and it has a wayland buffer, no memcpy then", buffer);
        sink->window->buffer = tmp_buffer->wbuffer;
      } else {
        /* No wayland buffer, that's a malloc */
        GST_DEBUG_OBJECT (sink, "we have a buffer (%p) we allocated "
            "ourselves but it does not hold a wayland buffer", buffer);
      }
    } else {
      /* Not our baby! */
      GST_DEBUG_OBJECT (sink, "we have a buffer (%p) we did not allocate",
          buffer);
    }

    if (mem_cpy) {

      GstWlBuffer *wlbuf = wayland_buffer_create (sink);

      memcpy (GST_BUFFER_DATA (wlbuf), GST_BUFFER_DATA (buffer),
          GST_BUFFER_SIZE (buffer));
      sink->window->buffer = wlbuf->wbuffer;
    }

    src.w = sink->video_width;
    src.h = sink->video_height;
    dst.w = sink->window->width;
    dst.h = sink->window->height;

    gst_video_sink_center_rect (src, dst, &res, FALSE);

    sink->render_finish = FALSE;

    wl_surface_attach (sink->window->surface, sink->window->buffer, 0, 0);
    wl_surface_damage (sink->window->surface, 0, 0, res.w, res.h);

    if (sink->callback)
      wl_callback_destroy (sink->callback);

    sink->callback = wl_surface_frame (sink->window->surface);
    wl_callback_add_listener (sink->callback, &frame_listener, sink);
    wl_surface_commit (sink->window->surface);

  } else
    GST_LOG_OBJECT (sink,
        "Waiting to get the signal from compositor to render the next frame..");

  wl_display_dispatch (sink->display->display);

  return GST_FLOW_OK;
}