Example #1
static GstFlowReturn
gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, GstBuffer ** outbuf)
{
    GstFlowReturn ret;

    ret = gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
            GST_PAD_CAPS (mpeg_dec->src), outbuf);
    if (ret != GST_FLOW_OK)
        return ret;

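    /* The GstVdpDevice only becomes known once downstream has handed us a
     * VDPAU buffer, so the decoder is created lazily on the first
     * successful allocation. */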
    if (!mpeg_dec->device) {
        GstVdpDevice *device;
        VdpStatus status;

        device = mpeg_dec->device =
                     g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);

        status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
                                             mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
        if (status != VDP_STATUS_OK) {
            GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
                               ("Could not create vdpau decoder"),
                               ("Error returned from vdpau was: %s",
                                device->vdp_get_error_string (status)));
            ret = GST_FLOW_ERROR;
        }
    }

    return ret;
}
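A minimal sketch of a hypothetical call site for this helper; the decode loop, the actual rendering, and the push downstream are assumed and not part of the example above:

static GstFlowReturn
decode_one_picture (GstVdpMpegDec * mpeg_dec)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;

  /* Allocates a downstream buffer and, on first use, the VDPAU decoder. */
  ret = gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  /* ... render into the buffer's surface and push it downstream ... */
  gst_buffer_unref (outbuf);    /* sketch only: release the buffer again */
  return GST_FLOW_OK;
}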
Example #2
static void
gst_vdp_video_buffer_finalize (GstVdpVideoBuffer * buffer)
{
  GSList *iter;
  GstVdpDevice *device;
  VdpStatus status;

  device = buffer->device;

  status = device->vdp_video_surface_destroy (buffer->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR
        ("Couldn't destroy the buffers VdpVideoSurface, error returned was: %s",
        device->vdp_get_error_string (status));

  g_object_unref (buffer->device);

  for (iter = buffer->refs; iter; iter = g_slist_next (iter)) {
    GstBuffer *buf;

    buf = (GstBuffer *) (iter->data);
    gst_buffer_unref (buf);
  }
  g_slist_free (buffer->refs);

  GST_MINI_OBJECT_CLASS (gst_vdp_video_buffer_parent_class)->finalize
      (GST_MINI_OBJECT (buffer));
}
Example #3
static GstFlowReturn
gst_vdp_vpp_create_mixer (GstVdpVideoPostProcess * vpp)
{
#define VDP_NUM_MIXER_PARAMETER 3
#define MAX_NUM_FEATURES 5

  VdpStatus status;
  GstVdpDevice *device;

  VdpVideoMixerFeature features[MAX_NUM_FEATURES];
  guint n_features = 0;
  VdpVideoMixerParameter parameters[VDP_NUM_MIXER_PARAMETER] = {
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
    VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
  };
  const void *parameter_values[VDP_NUM_MIXER_PARAMETER];

  parameter_values[0] = &vpp->width;
  parameter_values[1] = &vpp->height;
  parameter_values[2] = &vpp->chroma_type;

  if (gst_vdp_vpp_is_interlaced (vpp)
      && vpp->method != GST_VDP_DEINTERLACE_METHOD_BOB) {
    features[n_features++] =
        gst_vdp_feature_from_deinterlace_method (vpp->method);
  }
  if (vpp->noise_reduction > 0.0)
    features[n_features++] = VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION;
  if (vpp->sharpening != 0.0)
    features[n_features++] = VDP_VIDEO_MIXER_FEATURE_SHARPNESS;
  if (vpp->inverse_telecine)
    features[n_features++] = VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE;

  device = vpp->device;

  status =
      device->vdp_video_mixer_create (device->device, n_features, features,
      VDP_NUM_MIXER_PARAMETER, parameters, parameter_values, &vpp->mixer);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not create vdpau video mixer"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    return GST_FLOW_ERROR;
  }

  if (vpp->noise_reduction > 0.0) {
    gst_vdp_vpp_set_attribute_float (vpp,
        VDP_VIDEO_MIXER_ATTRIBUTE_NOISE_REDUCTION_LEVEL, vpp->noise_reduction);
  }
  if (vpp->sharpening != 0.0) {
    gst_vdp_vpp_set_attribute_float (vpp,
        VDP_VIDEO_MIXER_ATTRIBUTE_SHARPNESS_LEVEL, vpp->sharpening);
  }

  return GST_FLOW_OK;
}
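A hedged sketch of a possible call site: create the mixer only when it does not exist yet, assuming vpp->mixer is initialized to VDP_INVALID_HANDLE as in gst_vdp_vpp_stop () below:

  if (vpp->mixer == VDP_INVALID_HANDLE) {
    GstFlowReturn ret = gst_vdp_vpp_create_mixer (vpp);

    if (ret != GST_FLOW_OK)
      return ret;               /* mixer creation already posted the error */
  }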
Example #4
gboolean
gst_vdp_output_buffer_download (GstVdpOutputBuffer * output_buf,
    GstBuffer * outbuf, GError ** error)
{
  guint8 *data[1];
  guint32 stride[1];
  GstVdpDevice *device;
  VdpOutputSurface surface;
  VdpStatus status;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), FALSE);

  switch (output_buf->rgba_format) {
    case VDP_RGBA_FORMAT_A8:
    {
      stride[0] = output_buf->width;
      break;
    }

    case VDP_RGBA_FORMAT_B10G10R10A2:
    case VDP_RGBA_FORMAT_B8G8R8A8:
    case VDP_RGBA_FORMAT_R10G10B10A2:
    case VDP_RGBA_FORMAT_R8G8B8A8:
    {
      stride[0] = output_buf->width * 4;
      break;
    }

    default:
      return FALSE;
  }

  device = output_buf->device;
  surface = output_buf->surface;
  data[0] = GST_BUFFER_DATA (outbuf);

  GST_LOG_OBJECT (output_buf, "Entering vdp_output_surface_get_bits_native");
  status =
      device->vdp_output_surface_get_bits_native (surface, NULL, (void *) data,
      stride);
  GST_LOG_OBJECT (output_buf,
      "Got status %d from vdp_output_get_bits_native", status);

  if (G_UNLIKELY (status != VDP_STATUS_OK)) {
    g_set_error (error, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_READ,
        "Couldn't get data from vdpau, error returned from vdpau was: %s",
        device->vdp_get_error_string (status));
    return FALSE;
  }

  return TRUE;
}
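A short illustrative caller for the download helper; the wrapper name is hypothetical, and outbuf is assumed to be a large enough system-memory buffer:

static gboolean
copy_surface_to_sysmem (GstVdpOutputBuffer * output_buf, GstBuffer * outbuf)
{
  GError *err = NULL;

  if (!gst_vdp_output_buffer_download (output_buf, outbuf, &err)) {
    /* err stays NULL when the failure was an unsupported rgba_format */
    GST_WARNING ("download failed: %s",
        err ? err->message : "unsupported format");
    g_clear_error (&err);
    return FALSE;
  }
  return TRUE;
}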
Example #5
static void
gst_vdp_output_buffer_finalize (GstVdpOutputBuffer * buffer)
{
  GstVdpDevice *device;
  VdpStatus status;

  device = buffer->device;

  status = device->vdp_output_surface_destroy (buffer->surface);
  if (status != VDP_STATUS_OK)
    GST_ERROR
        ("Couldn't destroy the buffers VdpOutputSurface, error returned was: %s",
        device->vdp_get_error_string (status));

  g_object_unref (buffer->device);

  GST_MINI_OBJECT_CLASS (gst_vdp_output_buffer_parent_class)->finalize
      (GST_MINI_OBJECT (buffer));
}
Example #6
static void
gst_vdp_device_finalize (GObject * object)
{
  GstVdpDevice *device = (GstVdpDevice *) object;

  if (device->device != VDP_INVALID_HANDLE && device->vdp_device_destroy) {
    device->vdp_device_destroy (device->device);
    device->device = VDP_INVALID_HANDLE;
  }

  if (device->display) {
    XCloseDisplay (device->display);
    device->display = NULL;
  }

  g_free (device->display_name);
  device->display_name = NULL;

  G_OBJECT_CLASS (gst_vdp_device_parent_class)->finalize (object);
}
Example #7
static gboolean
gst_vdp_vpp_stop (GstVdpVideoPostProcess * vpp)
{
  gst_vdp_vpp_flush (vpp);

  if (vpp->vpool)
    g_object_unref (vpp->vpool);

  if (vpp->mixer != VDP_INVALID_HANDLE) {
    GstVdpDevice *device = vpp->device;
    VdpStatus status;

    status = device->vdp_video_mixer_destroy (vpp->mixer);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not destroy vdpau decoder"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      return FALSE;
    }
  }

  return TRUE;
}
Example #8
static void
gst_vdp_sink_window_setup_vdpau (VdpSink * vdp_sink, GstVdpWindow * window)
{
  GstVdpDevice *device = vdp_sink->device;
  VdpStatus status;
  VdpColor color = { 0, };

  status = device->vdp_presentation_queue_target_create_x11 (device->device,
      window->win, &window->target);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not create presentation target"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }

  status =
      device->vdp_presentation_queue_create (device->device, window->target,
      &window->queue);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not create presentation queue"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }

  status =
      device->vdp_presentation_queue_set_background_color (window->queue,
      &color);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not set background color"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }
}
Example #9
static GstFlowReturn
gst_vdp_sink_show_frame (GstBaseSink * bsink, GstBuffer * outbuf)
{
  VdpSink *vdp_sink = GST_VDP_SINK (bsink);
  VdpStatus status;
  GstVdpDevice *device;

  g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), GST_FLOW_ERROR);

  /* We take the flow_lock. If expose is in there we don't want to run
     concurrently from the data flow thread */
  g_mutex_lock (vdp_sink->flow_lock);

  if (G_UNLIKELY (vdp_sink->window == NULL)) {
    g_mutex_unlock (vdp_sink->flow_lock);
    return GST_FLOW_ERROR;
  }

  device = vdp_sink->device;

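  /* If the previously shown frame is still queued for presentation, drop
   * this one instead of blocking behind the presentation queue. */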
  if (vdp_sink->cur_image) {
    VdpOutputSurface surface =
        GST_VDP_OUTPUT_BUFFER (vdp_sink->cur_image)->surface;
    VdpPresentationQueueStatus queue_status;
    VdpTime pres_time;

    g_mutex_lock (vdp_sink->x_lock);
    status =
        device->vdp_presentation_queue_query_surface_status (vdp_sink->
        window->queue, surface, &queue_status, &pres_time);
    g_mutex_unlock (vdp_sink->x_lock);

    if (status == VDP_STATUS_OK
        && queue_status == VDP_PRESENTATION_QUEUE_STATUS_QUEUED) {
      g_mutex_unlock (vdp_sink->flow_lock);
      return GST_FLOW_OK;
    }
  }

  /* Expose sends a NULL image, we take the latest frame */
  if (!outbuf) {
    if (vdp_sink->cur_image) {
      outbuf = vdp_sink->cur_image;
    } else {
      g_mutex_unlock (vdp_sink->flow_lock);
      return GST_FLOW_OK;
    }
  }

  gst_vdp_sink_window_update_geometry (vdp_sink, vdp_sink->window);

  g_mutex_lock (vdp_sink->x_lock);

  status = device->vdp_presentation_queue_display (vdp_sink->window->queue,
      GST_VDP_OUTPUT_BUFFER (outbuf)->surface, 0, 0, 0);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not display frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));

    g_mutex_unlock (vdp_sink->x_lock);
    g_mutex_unlock (vdp_sink->flow_lock);
    return GST_FLOW_ERROR;
  }


  if (!vdp_sink->cur_image) {
    vdp_sink->cur_image = gst_buffer_ref (outbuf);
  } else if (vdp_sink->cur_image != outbuf) {
    gst_buffer_unref (vdp_sink->cur_image);
    vdp_sink->cur_image = gst_buffer_ref (outbuf);
  }

  XSync (vdp_sink->device->display, FALSE);

  g_mutex_unlock (vdp_sink->x_lock);
  g_mutex_unlock (vdp_sink->flow_lock);

  return GST_FLOW_OK;
}
Example #10
/* This function handles a GstVdpWindow creation */
static GstVdpWindow *
gst_vdp_sink_window_new (VdpSink * vdp_sink, gint width, gint height)
{
  GstVdpWindow *window = NULL;
  GstVdpDevice *device = vdp_sink->device;

  Window root;
  gint screen_num;
  gulong black;

  VdpStatus status;
  VdpColor color = { 0, };

  g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), NULL);

  window = g_new0 (GstVdpWindow, 1);

  window->width = width;
  window->height = height;
  window->internal = TRUE;

  g_mutex_lock (vdp_sink->x_lock);

  screen_num = DefaultScreen (device->display);
  root = DefaultRootWindow (device->display);
  black = XBlackPixel (device->display, screen_num);

  window->win = XCreateSimpleWindow (vdp_sink->device->display,
      root, 0, 0, window->width, window->height, 0, 0, black);

  /* We have to do that to prevent X from redrawing the background on 
     ConfigureNotify. This takes away flickering of video when resizing. */
  XSetWindowBackgroundPixmap (vdp_sink->device->display, window->win, None);

  /* set application name as a title */
  gst_vdp_sink_window_set_title (vdp_sink, window, NULL);

  if (vdp_sink->handle_events) {
    Atom wm_delete;

    XSelectInput (vdp_sink->device->display, window->win, ExposureMask |
        StructureNotifyMask | PointerMotionMask | KeyPressMask |
        KeyReleaseMask | ButtonPressMask | ButtonReleaseMask);

    /* Tell the window manager we'd like delete client messages instead of
     * being killed */
    wm_delete =
        XInternAtom (vdp_sink->device->display, "WM_DELETE_WINDOW", False);
    (void) XSetWMProtocols (vdp_sink->device->display, window->win, &wm_delete,
        1);
  }

  XMapRaised (vdp_sink->device->display, window->win);

  XSync (vdp_sink->device->display, FALSE);

  g_mutex_unlock (vdp_sink->x_lock);

  gst_vdp_sink_window_decorate (vdp_sink, window);

  status = device->vdp_presentation_queue_target_create_x11 (device->device,
      window->win, &window->target);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not create presentation target"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }

  status =
      device->vdp_presentation_queue_create (device->device, window->target,
      &window->queue);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not create presentation queue"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }

  status =
      device->vdp_presentation_queue_set_background_color (window->queue,
      &color);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ,
        ("Could not set background color"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
  }

  gst_x_overlay_got_xwindow_id (GST_X_OVERLAY (vdp_sink), window->win);

  return window;
}
Example #11
static GstFlowReturn
gst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp)
{
  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  GstFlowReturn ret = GST_FLOW_OK;

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GError *err;
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }, dest_r = { 0, };
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    err = NULL;
    ret =
        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *) vpp->srcpad,
        &outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    src_r.w = vpp->width;
    src_r.h = vpp->height;
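    /* Apply the pixel aspect ratio to the source width, keeping the
     * rectangle horizontally centered. */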
    if (vpp->got_par) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK)
      goto render_error;

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    err = NULL;
    ret =
        gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,
        outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid output caps"), (NULL));
    ret = GST_FLOW_ERROR;
    break;

  render_error:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not postprocess frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    ret = GST_FLOW_ERROR;
    break;

  output_pad_error:
    if (ret == GST_FLOW_ERROR && err != NULL)
      gst_vdp_vpp_post_error (vpp, err);
    break;
  }

  return ret;
}
Example #12
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }, dest_r = { 0, };
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret =
        gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);

  return ret;
}
Example #13
static GstFlowReturn
gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
                         GstClockTime timestamp, gint64 size)
{
    VdpPictureInfoMPEG1Or2 *info;
    GstBuffer *buffer;
    GstBuffer *outbuf;
    VdpVideoSurface surface;
    GstVdpDevice *device;
    VdpBitstreamBuffer vbit[1];
    VdpStatus status;

    info = &mpeg_dec->vdp_info;

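    /* For I and P pictures the old backward reference becomes the new
     * forward reference: push the previous backward-reference frame
     * downstream and shift it into the forward slot before decoding this
     * picture. */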
    if (info->picture_coding_type != B_FRAME) {
        if (info->backward_reference != VDP_INVALID_HANDLE) {
            gst_buffer_ref (mpeg_dec->b_buffer);
            gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
                                                GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
        }

        if (info->forward_reference != VDP_INVALID_HANDLE) {
            gst_buffer_unref (mpeg_dec->f_buffer);
            info->forward_reference = VDP_INVALID_HANDLE;
        }

        info->forward_reference = info->backward_reference;
        mpeg_dec->f_buffer = mpeg_dec->b_buffer;

        info->backward_reference = VDP_INVALID_HANDLE;
    }

    if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
        gst_adapter_clear (mpeg_dec->adapter);
        return GST_FLOW_ERROR;
    }

    device = GST_VDP_VIDEO_BUFFER (outbuf)->device;

    if (info->forward_reference != VDP_INVALID_HANDLE &&
            info->picture_coding_type != I_FRAME)
        gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
                                            GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));

    if (info->backward_reference != VDP_INVALID_HANDLE
            && info->picture_coding_type == B_FRAME)
        gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
                                            GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
    GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
    GST_BUFFER_SIZE (outbuf) = size;

    if (info->picture_coding_type == I_FRAME)
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
    else
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

    if (info->top_field_first)
        GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
    else
        GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

    buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
                                      gst_adapter_available (mpeg_dec->adapter));

    surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

    vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
    vbit[0].bitstream = GST_BUFFER_DATA (buffer);
    vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);

    status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
                                         (VdpPictureInfo *) info, 1, vbit);
    gst_buffer_unref (buffer);
    info->slice_count = 0;

    if (status != VDP_STATUS_OK) {
        GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
                           ("Could not decode"),
                           ("Error returned from vdpau was: %s",
                            device->vdp_get_error_string (status)));

        gst_buffer_unref (GST_BUFFER (outbuf));

        return GST_FLOW_ERROR;
    }

    if (info->picture_coding_type == B_FRAME) {
        gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
                                            GST_VDP_VIDEO_BUFFER (outbuf));
    } else {
        info->backward_reference = surface;
        mpeg_dec->b_buffer = GST_BUFFER (outbuf);
    }

    return GST_FLOW_OK;
}
Example #14
GstFlowReturn
gst_vdp_video_yuv_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);
  GstVdpDevice *device;
  VdpVideoSurface surface;

  device = GST_VDP_VIDEO_BUFFER (inbuf)->device;
  surface = GST_VDP_VIDEO_BUFFER (inbuf)->surface;

  switch (video_yuv->format) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
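    /* I420 differs from YV12 only in the order of the U and V planes, so
     * the plane pointers are swapped and the surface is read out with the
     * VDP_YCBCR_FORMAT_YV12 layout. */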
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[2];
      guint32 stride[2];

      data[0] = GST_BUFFER_DATA (outbuf);
      data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;

      stride[0] = video_yuv->width;
      stride[1] = video_yuv->width;

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_UYVY, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YUYV, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    default:
      break;
  }

  gst_buffer_copy_metadata (outbuf, inbuf,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

  GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

  return GST_FLOW_OK;
}
Example #15
gboolean
gst_vdp_video_buffer_upload (GstVdpVideoBuffer * video_buf, GstBuffer * src_buf,
    guint fourcc, gint width, gint height)
{
  guint8 *data[3];
  guint32 stride[3];
  VdpYCbCrFormat format;
  GstVdpDevice *device;
  VdpStatus status;

  g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), FALSE);
  g_return_val_if_fail (GST_IS_BUFFER (src_buf), FALSE);

  switch (fourcc) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      data[0] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, width, height);
      data[1] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, width, height);
      data[2] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
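    /* Same plane-order trick as on the download side: I420 is YV12 with
     * the U and V planes swapped, so upload it as VDP_YCBCR_FORMAT_YV12. */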
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      data[0] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, width, height);
      data[1] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, width, height);
      data[2] = GST_BUFFER_DATA (src_buf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, width, height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, width);

      format = VDP_YCBCR_FORMAT_YV12;
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      data[0] = GST_BUFFER_DATA (src_buf);
      data[1] = GST_BUFFER_DATA (src_buf) + width * height;

      stride[0] = width;
      stride[1] = width;

      format = VDP_YCBCR_FORMAT_NV12;
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      data[0] = GST_BUFFER_DATA (src_buf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, width);

      format = VDP_YCBCR_FORMAT_UYVY;
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      data[0] = GST_BUFFER_DATA (src_buf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, width);

      format = VDP_YCBCR_FORMAT_YUYV;
      break;
    }
    default:
      return FALSE;
  }

  device = video_buf->device;
  status = device->vdp_video_surface_put_bits_ycbcr (video_buf->surface, format,
      (void *) data, stride);
  if (G_UNLIKELY (status != VDP_STATUS_OK)) {
    GST_ERROR_OBJECT (video_buf, "Couldn't push YUV data to VDPAU, "
        "Error returned from vdpau was: %s",
        device->vdp_get_error_string (status));
    return FALSE;
  }

  return TRUE;
}
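A hypothetical caller, uploading a system-memory I420 frame into the buffer's VdpVideoSurface (the wrapper name and error handling are illustrative only):

static gboolean
upload_i420_frame (GstVdpVideoBuffer * video_buf, GstBuffer * src_buf,
    gint width, gint height)
{
  if (!gst_vdp_video_buffer_upload (video_buf, src_buf,
          GST_MAKE_FOURCC ('I', '4', '2', '0'), width, height)) {
    GST_WARNING ("upload to the VdpVideoSurface failed");
    return FALSE;
  }
  return TRUE;
}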