/* In-place transform: wrap the mapped video frame in a cairo image surface
 * and let applications paint on it via the "draw" signal. */
static GstFlowReturn
gst_cairo_overlay_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstCairoOverlay *overlay = GST_CAIRO_OVERLAY (vfilter);
  GstVideoFormat vformat = GST_VIDEO_FRAME_FORMAT (frame);
  cairo_format_t cformat;
  cairo_surface_t *surface;
  cairo_t *cr;

  /* Translate the negotiated GStreamer pixel format into the matching
   * cairo surface format; anything else cannot be drawn on. */
  switch (vformat) {
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
      cformat = CAIRO_FORMAT_ARGB32;
      break;
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
      cformat = CAIRO_FORMAT_RGB24;
      break;
    case GST_VIDEO_FORMAT_RGB16:
      cformat = CAIRO_FORMAT_RGB16_565;
      break;
    default:
      GST_WARNING ("No matching cairo format for %s",
          gst_video_format_to_string (vformat));
      return GST_FLOW_ERROR;
  }

  /* Zero-copy: the surface aliases the frame's first plane. */
  surface =
      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (frame,
          0), cformat, GST_VIDEO_FRAME_WIDTH (frame),
      GST_VIDEO_FRAME_HEIGHT (frame), GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0));
  if (G_UNLIKELY (!surface))
    return GST_FLOW_ERROR;

  cr = cairo_create (surface);
  if (G_UNLIKELY (!cr)) {
    cairo_surface_destroy (surface);
    return GST_FLOW_ERROR;
  }

  /* Hand the context plus the buffer's timing to the application. */
  g_signal_emit (overlay, gst_cairo_overlay_signals[SIGNAL_DRAW], 0,
      cr, GST_BUFFER_PTS (frame->buffer), GST_BUFFER_DURATION (frame->buffer),
      NULL);

  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  return GST_FLOW_OK;
}
/* Example #2 */
/* Fill a th_ycbcr_buffer descriptor from a mapped video frame so the
 * Theora encoder can read the planes directly (no copy). */
static void
theora_enc_init_buffer (th_ycbcr_buffer buf, GstVideoFrame * frame)
{
    GstVideoInfo info;
    guint plane;

    /* According to Theora developer Timothy Terriberry, the Theora
     * encoder will not use memory outside of pic_width/height, even when
     * the frame size is bigger. The values outside this region will be encoded
     * to default values.
     * Due to this, setting the frame's width/height as the buffer width/height
     * is perfectly ok, even though it does not strictly look ok.
     */

    /* Build a video info with dimensions rounded up to the next multiple
     * of 16, as Theora works on 16x16 superblocks. */
    gst_video_info_init (&info);
    gst_video_info_set_format (&info, GST_VIDEO_FRAME_FORMAT (frame),
                               GST_ROUND_UP_16 (GST_VIDEO_FRAME_WIDTH (frame)),
                               GST_ROUND_UP_16 (GST_VIDEO_FRAME_HEIGHT (frame)));

    /* Y, Cb and Cr planes: sizes come from the padded info, data and
     * strides from the actually mapped frame. */
    for (plane = 0; plane < 3; plane++) {
        buf[plane].width = GST_VIDEO_INFO_COMP_WIDTH (&info, plane);
        buf[plane].height = GST_VIDEO_INFO_COMP_HEIGHT (&info, plane);
        buf[plane].data = GST_VIDEO_FRAME_COMP_DATA (frame, plane);
        buf[plane].stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, plane);
    }
}
/* Example #3 */
/* Apply the subclass' geometric mapping to every output pixel.
 *
 * The output frame is first cleared to black (format-aware), then each
 * destination pixel (x, y) is filled from the source coordinate produced
 * either by the precalculated map or by calling the subclass map_func.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR if preparing the map or
 * mapping a pixel fails.
 */
static GstFlowReturn
gst_geometric_transform_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstGeometricTransform *gt;
  GstGeometricTransformClass *klass;
  gint x, y;
  gsize i;
  GstFlowReturn ret = GST_FLOW_OK;
  gdouble *ptr;
  guint8 *in_data;
  guint8 *out_data;

  gt = GST_GEOMETRIC_TRANSFORM_CAST (vfilter);
  klass = GST_GEOMETRIC_TRANSFORM_GET_CLASS (gt);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  if (GST_VIDEO_FRAME_FORMAT (out_frame) == GST_VIDEO_FORMAT_AYUV) {
    /* in AYUV black is not just all zeros:
     * 0x10 is black for Y,
     * 0x80 is black for Cr and Cb */
    for (i = 0; i < out_frame->map[0].size; i += 4)
      GST_WRITE_UINT32_BE (out_data + i, 0xff108080);
  } else {
    memset (out_data, 0, out_frame->map[0].size);
  }

  GST_OBJECT_LOCK (gt);
  if (gt->precalc_map) {
    if (gt->needs_remap) {
      if (klass->prepare_func) {
        if (!klass->prepare_func (gt)) {
          /* Fix: was 'ret = FALSE', which equals GST_FLOW_OK and silently
           * reported success on a failed prepare. */
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
      gst_geometric_transform_generate_map (gt);
    }
    g_return_val_if_fail (gt->map, GST_FLOW_ERROR);
    /* Map stores (in_x, in_y) pairs for each output pixel, row-major. */
    ptr = gt->map;
    for (y = 0; y < gt->height; y++) {
      for (x = 0; x < gt->width; x++) {
        /* do the mapping */
        gst_geometric_transform_do_map (gt, in_data, out_data, x, y, ptr[0],
            ptr[1]);
        ptr += 2;
      }
    }
  } else {
    /* No precalculated map: ask the subclass for each pixel's source. */
    for (y = 0; y < gt->height; y++) {
      for (x = 0; x < gt->width; x++) {
        gdouble in_x, in_y;

        if (klass->map_func (gt, x, y, &in_x, &in_y)) {
          gst_geometric_transform_do_map (gt, in_data, out_data, x, y, in_x,
              in_y);
        } else {
          GST_WARNING_OBJECT (gt, "Failed to do mapping for %d %d", x, y);
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
    }
  }
end:
  GST_OBJECT_UNLOCK (gt);
  return ret;
}
/* Example #4 */
/**
 * gst_gl_filter_filter_texture:
 * @filter: a #GstGLFilter
 * @inbuf: an input buffer
 * @outbuf: an output buffer
 *
 * Perform automatic upload if needed, call filter_texture vfunc and then an
 * automatic download if needed.
 *
 * Returns: whether the transformation succeeded
 */
gboolean
gst_gl_filter_filter_texture (GstGLFilter * filter, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstGLFilterClass *filter_class;
  guint in_tex, out_tex;
  GstVideoFrame out_frame;
  gboolean ret, out_gl_mem;
  GstVideoGLTextureUploadMeta *out_tex_upload_meta;

  filter_class = GST_GL_FILTER_GET_CLASS (filter);

  if (!gst_gl_upload_perform_with_buffer (filter->upload, inbuf, &in_tex))
    return FALSE;

  if (!gst_video_frame_map (&out_frame, &filter->out_info, outbuf,
          GST_MAP_WRITE | GST_MAP_GL)) {
    ret = FALSE;
    goto inbuf_error;
  }

  out_gl_mem = gst_is_gl_memory (out_frame.map[0].memory);
  out_tex_upload_meta = gst_buffer_get_video_gl_texture_upload_meta (outbuf);

  if (out_gl_mem) {
    out_tex = *(guint *) out_frame.data[0];
  } else {
    GST_LOG ("Output Buffer does not contain correct memory, "
        "attempting to wrap for download");

    if (!filter->download) {
      filter->download = gst_gl_download_new (filter->context);

      if (!gst_gl_download_init_format (filter->download,
              GST_VIDEO_FRAME_FORMAT (&out_frame),
              GST_VIDEO_FRAME_WIDTH (&out_frame),
              GST_VIDEO_FRAME_HEIGHT (&out_frame))) {
        GST_ELEMENT_ERROR (filter, RESOURCE, NOT_FOUND,
            ("%s", "Failed to init download format"), (NULL));
        ret = FALSE;
        goto error;
      }
    }
    out_tex = filter->out_tex_id;
  }

  GST_DEBUG ("calling filter_texture with textures in:%i out:%i", in_tex,
      out_tex);

  g_assert (filter_class->filter_texture);
  ret = filter_class->filter_texture (filter, in_tex, out_tex);

  if (!out_gl_mem && !out_tex_upload_meta) {
    if (!gst_gl_download_perform_with_data (filter->download, out_tex,
            out_frame.data)) {
      GST_ELEMENT_ERROR (filter, RESOURCE, NOT_FOUND,
          ("%s", "Failed to download video frame"), (NULL));
      ret = FALSE;
      goto error;
    }
  }

error:
  gst_video_frame_unmap (&out_frame);
inbuf_error:
  gst_gl_upload_release_buffer (filter->upload);

  return ret;
}
/* Example #5 */
/* this function does the actual processing
 */
static GstFlowReturn
gst_rgb_to_yuv_transform_frame (GstVideoFilter *filter, GstVideoFrame *in_frame, GstVideoFrame *out_frame)
{
  GstRgbToYuv *rgbtoyuv = GST_RGBTOYUV_CAST (filter);
  gint width, height, stride;
  gint y_stride, uv_stride;
  guint32 *in_data;
  guint8 *y_out, *u_out, *v_out;

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);

  in_data = (guint32*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);

  y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  uv_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1);

  y_out = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
  u_out = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1);
  v_out = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 2);

  GST_INFO ("DEBUG_INFO: rgbtoyuv::transform_frame: ");
  GST_INFO ("in stride: %d; out stride: %d %d\n", stride, y_stride, uv_stride);

  switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    GST_INFO("DEBUG_INFO: rgbtoyuv input format RGBA.\n");
    libyuv::RGBAToI420 ((guint8 *)(in_data), stride,
                y_out, y_stride,
                u_out, uv_stride,
                v_out, uv_stride,
                width, height);
    break;

    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    GST_INFO ("DEBUG_INFO: rgbtoyuv input format ARGB.\n");
    libyuv::ARGBToI420 ((guint8 *)(in_data), stride,
                y_out, y_stride,
                u_out, uv_stride,
                v_out, uv_stride,
                width, height);
    break;

    case GST_VIDEO_FORMAT_RGB16:
    GST_INFO("DEBUG_INFO: rgbtoyuv input format RGB16.\n");
    libyuv::RGB565ToI420 ((guint8 *)(in_data), stride,
                y_out, y_stride,
                u_out, uv_stride,
                v_out, uv_stride,
                width, height);
    break;

    default:
    return GST_FLOW_ERROR;
    break;
  }

  return GST_FLOW_OK;
}
/* Example #6 */
/* Fill one output buffer with the configured test pattern and stamp it
 * with timestamps, offsets and duration derived from the frame counter. */
static GstFlowReturn
gst_video_test_src_fill (GstPushSrc * psrc, GstBuffer * buffer)
{
  GstVideoTestSrc *src = GST_VIDEO_TEST_SRC (psrc);
  GstVideoFrame vframe;
  GstClockTime next_time;
  gconstpointer palette;
  gsize palette_size;

  /* Can't produce anything before caps were negotiated. */
  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&src->info) ==
          GST_VIDEO_FORMAT_UNKNOWN)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before get function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  /* 0 framerate and we are at the second frame, eos */
  if (G_UNLIKELY (src->info.fps_n == 0 && src->n_frames == 1)) {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->n_frames);
    return GST_FLOW_EOS;
  }

  GST_LOG_OBJECT (src,
      "creating buffer from pool for frame %d", (gint) src->n_frames);

  if (!gst_video_frame_map (&vframe, &src->info, buffer, GST_MAP_WRITE)) {
    GST_DEBUG_OBJECT (src, "invalid frame");
    return GST_FLOW_OK;
  }

  src->make_image (src, &vframe);

  /* Paletted formats keep their palette in plane 1. */
  palette = gst_video_format_get_palette (GST_VIDEO_FRAME_FORMAT (&vframe),
      &palette_size);
  if (palette)
    memcpy (GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1), palette, palette_size);

  gst_video_frame_unmap (&vframe);

  GST_BUFFER_DTS (buffer) =
      src->accum_rtime + src->timestamp_offset + src->running_time;
  GST_BUFFER_PTS (buffer) = GST_BUFFER_DTS (buffer);

  GST_DEBUG_OBJECT (src, "Timestamp: %" GST_TIME_FORMAT " = accumulated %"
      GST_TIME_FORMAT " + offset: %"
      GST_TIME_FORMAT " + running time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)), GST_TIME_ARGS (src->accum_rtime),
      GST_TIME_ARGS (src->timestamp_offset), GST_TIME_ARGS (src->running_time));

  GST_BUFFER_OFFSET (buffer) = src->accum_frames + src->n_frames;
  src->n_frames++;
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET (buffer) + 1;

  if (src->info.fps_n) {
    /* Duration is the distance to the next frame's running time. */
    next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
        src->info.fps_d, src->info.fps_n);
    GST_BUFFER_DURATION (buffer) = next_time - src->running_time;
  } else {
    next_time = src->timestamp_offset;
    /* NONE means forever */
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  src->running_time = next_time;

  return GST_FLOW_OK;
}