Example #1
static GstFlowReturn
gst_pixbufscale_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in, GstVideoFrame * out)
{
  GstPixbufScale *scale;
  GdkPixbuf *src_pixbuf, *dest_pixbuf;

  scale = GST_PIXBUFSCALE (filter);

  src_pixbuf =
      gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (in, 0),
      GDK_COLORSPACE_RGB, FALSE, 8, GST_VIDEO_FRAME_WIDTH (in),
      GST_VIDEO_FRAME_HEIGHT (in),
      GST_VIDEO_FRAME_COMP_STRIDE (in, 0), NULL, NULL);

  dest_pixbuf =
      gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (out, 0),
      GDK_COLORSPACE_RGB, FALSE, 8, GST_VIDEO_FRAME_WIDTH (out),
      GST_VIDEO_FRAME_HEIGHT (out),
      GST_VIDEO_FRAME_COMP_STRIDE (out, 0), NULL, NULL);

  gdk_pixbuf_scale (src_pixbuf, dest_pixbuf, 0, 0,
      GST_VIDEO_FRAME_WIDTH (out),
      GST_VIDEO_FRAME_HEIGHT (out), 0, 0,
      (double) GST_VIDEO_FRAME_WIDTH (out) / GST_VIDEO_FRAME_WIDTH (in),
      (double) GST_VIDEO_FRAME_HEIGHT (out) / GST_VIDEO_FRAME_HEIGHT (in),
      scale->gdk_method);

  g_object_unref (src_pixbuf);
  g_object_unref (dest_pixbuf);

  return GST_FLOW_OK;
}
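
The scaling above runs inside GstVideoFilter's per-frame hook, which hands the function already-mapped input and output frames. A minimal sketch of how such a vmethod is wired up in the element's class_init (upstream's class_init also configures caps and properties; this only shows the hook):

static void
gst_pixbufscale_class_init (GstPixbufScaleClass * klass)
{
  GstVideoFilterClass *filter_class = GST_VIDEO_FILTER_CLASS (klass);

  /* invoked once per buffer with input and output frames already mapped */
  filter_class->transform_frame =
      GST_DEBUG_FUNCPTR (gst_pixbufscale_transform_frame);
}
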
static void
fill_frame_packed8_3 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h;
  guint8 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_in[0] = image->comps[0].data;
  data_in[1] = image->comps[1].data;
  data_in[2] = image->comps[2].data;
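
  /* only bytes 1..3 (R, G, B) of each packed 4-byte pixel are written;
   * byte 0 is the filler/alpha byte of the packed format (presumably xRGB
   * here) and is left untouched */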

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      tmp[1] = *data_in[0];
      tmp[2] = *data_in[1];
      tmp[3] = *data_in[2];

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
    }
    data_out += dstride;
  }
}
static void
gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  guint8 *in_data, *out_data;
  gint width, height;
  guint i, dx;
  gint in_stride, out_stride;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  in_data += vcrop->crop_top * in_stride;
  in_data += vcrop->crop_left * GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  /* number of bytes to copy per row: output width in pixels times bytes
   * per pixel */
  dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);

  for (i = 0; i < height; ++i) {
    memcpy (out_data, in_data, dx);
    in_data += in_stride;
    out_data += out_stride;
  }
}
Example #4
/* this function does the actual processing
 */
static GstFlowReturn
gst_yuv_to_rgb_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstYuvToRgb *rgbtoyuv = GST_YUVTORGB_CAST (filter);
  gint width, height, stride;
  gint y_stride, uv_stride;
  guint32 *out_data;
  guint8 *y_in, *u_in, *v_in;

  y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  uv_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);

  y_in = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  u_in = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
  v_in = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2);

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  out_data = (guint32*) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  // GST_INFO ("DEBUG_INFO: rgbtoyuv::transform_frame: ");
  // GST_INFO ("in stride: %d; out stride: %d %d\n", stride, y_stride, uv_stride);

  libyuv::I420ToARGB (y_in, y_stride,
              u_in, uv_stride,
              v_in, uv_stride,
              (guint8*)out_data, stride,
              width, height);

  return GST_FLOW_OK;
}
Example #5
static void
gst_smpte_blend_i420 (GstVideoFrame * frame1, GstVideoFrame * frame2,
    GstVideoFrame * oframe, GstMask * mask, gint border, gint pos)
{
  guint32 *maskp;
  gint value;
  gint i, j;
  gint min, max;
  guint8 *in1, *in2, *out, *in1u, *in1v, *in2u, *in2v, *outu, *outv;
  gint width, height;

  if (border == 0)
    border++;

  min = pos - border;
  max = pos;

  width = GST_VIDEO_FRAME_WIDTH (frame1);
  height = GST_VIDEO_FRAME_HEIGHT (frame1);

  in1 = GST_VIDEO_FRAME_COMP_DATA (frame1, 0);
  in2 = GST_VIDEO_FRAME_COMP_DATA (frame2, 0);
  out = GST_VIDEO_FRAME_COMP_DATA (oframe, 0);

  in1u = GST_VIDEO_FRAME_COMP_DATA (frame1, 1);
  in1v = GST_VIDEO_FRAME_COMP_DATA (frame1, 2);
  in2u = GST_VIDEO_FRAME_COMP_DATA (frame2, 1);
  in2v = GST_VIDEO_FRAME_COMP_DATA (frame2, 2);
  outu = GST_VIDEO_FRAME_COMP_DATA (oframe, 1);
  outv = GST_VIDEO_FRAME_COMP_DATA (oframe, 2);

  maskp = mask->data;
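
  /* each mask sample maps to a Q8 blend weight in [0, 256]:
   * ((CLAMP (v, min, max) - min) << 8) / border, where max - min == border */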

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      value = *maskp++;
      value = ((CLAMP (value, min, max) - min) << 8) / border;

      out[j] = ((in1[j] * value) + (in2[j] * (256 - value))) >> 8;
      if (!(i & 1) && !(j & 1)) {
        outu[j / 2] =
            ((in1u[j / 2] * value) + (in2u[j / 2] * (256 - value))) >> 8;
        outv[j / 2] =
            ((in1v[j / 2] * value) + (in2v[j / 2] * (256 - value))) >> 8;
      }
    }

    in1 += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 0);
    in2 += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 0);
    out += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 0);

    if (!(i & 1)) {
      in1u += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 1);
      in2u += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 1);
      in1v += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 2);
      in2v += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 2);
      outu += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 1);
      outv += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 2);
    }
  }
}
static void
fill_image_packed8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out[3];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_in = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
    }
    data_in += sstride;
  }
}
static void
fill_frame_planar16_1 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  data_in = image->comps[0].data;

  shift = 16 - image->comps[0].prec;

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      *tmp = *data_in << shift;

      tmp++;
      data_in++;
    }
    data_out += dstride;
  }
}
Example #8
static void
theora_enc_init_buffer (th_ycbcr_buffer buf, GstVideoFrame * frame)
{
    GstVideoInfo vinfo;
    guint i;

    /* According to Theora developer Timothy Terriberry, the Theora
     * encoder will not use memory outside of pic_width/height, even when
     * the frame size is bigger. The values outside this region will be encoded
     * to default values.
     * Due to this, setting the frame's width/height as the buffer width/height
     * is perfectly ok, even though it does not strictly look ok.
     */

    gst_video_info_init (&vinfo);
    gst_video_info_set_format (&vinfo, GST_VIDEO_FRAME_FORMAT (frame),
                               GST_ROUND_UP_16 (GST_VIDEO_FRAME_WIDTH (frame)),
                               GST_ROUND_UP_16 (GST_VIDEO_FRAME_HEIGHT (frame)));

    for (i = 0; i < 3; i++) {
        buf[i].width = GST_VIDEO_INFO_COMP_WIDTH (&vinfo, i);
        buf[i].height = GST_VIDEO_INFO_COMP_HEIGHT (&vinfo, i);
        buf[i].data = GST_VIDEO_FRAME_COMP_DATA (frame, i);
        buf[i].stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    }
}
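
The GST_ROUND_UP_16() padding above is a plain bitmask round-up, GST_ROUND_UP_16 (num) expanding to (((num) + 15) & ~15), so the padded dimensions are cheap to compute. A tiny sanity check:

static void
check_round_up_16 (void)
{
  g_assert (GST_ROUND_UP_16 (300) == 304);      /* (300 + 15) & ~15 */
  g_assert (GST_ROUND_UP_16 (304) == 304);      /* already aligned */
}
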
Example #9
static void
gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  gint pixel_stride;
  guint8 *data;
  gint offsets[3];
  gint r, g, b;
  gint y, u, v;
  gint u_tmp, v_tmp;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = stride - pixel_stride * width;
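
  /* row_wrap is the padding at the end of each row; adding it after the
   * inner loop advances data to the start of the next row */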

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];

      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      y = CLAMP (y, 0, 255);
      u_tmp = CLAMP (u_tmp, 0, 255);
      v_tmp = CLAMP (v_tmp, 0, 255);

      y = tabley[y];
      u = tableu[u_tmp][v_tmp];
      v = tablev[u_tmp][v_tmp];

      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      data[offsets[0]] = CLAMP (r, 0, 255);
      data[offsets[1]] = CLAMP (g, 0, 255);
      data[offsets[2]] = CLAMP (b, 0, 255);
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
Example #10
static void
gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y, stride;
  guint8 *ydata, *udata, *vdata;
  gint yoff, uoff, voff;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * stride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr += yoff;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
  uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
  voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);

  for (y = 0; y < height2; y++) {
    guint8 *uptr, *vptr;
    guint8 u1, v1;

    uptr = udata + y * stride;
    vptr = vdata + y * stride;

    for (x = 0; x < width2; x++) {
      u1 = *uptr;
      v1 = *vptr;

      *uptr = tableu[u1][v1];
      *vptr = tablev[u1][v1];

      uptr += uoff;
      vptr += voff;
    }
  }
}
Example #11
static void
gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y;
  guint8 *ydata;
  guint8 *uvdata;
  gint ystride, uvstride;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;
  gint upos, vpos;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * ystride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr++;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
  uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);

  upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
  vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;

  for (y = 0; y < height2; y++) {
    guint8 *uvptr;
    guint8 u1, v1;

    uvptr = uvdata + y * uvstride;

    for (x = 0; x < width2; x++) {
      u1 = uvptr[upos];
      v1 = uvptr[vpos];

      uvptr[upos] = tableu[u1][v1];
      uvptr[vpos] = tablev[u1][v1];
      uvptr += 2;
    }
  }
}
static opj_image_t *
gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame)
{
  gint i, ncomps;
  opj_image_cmptparm_t *comps;
  OPJ_COLOR_SPACE colorspace;
  opj_image_t *image;

  ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
  comps = g_new0 (opj_image_cmptparm_t, ncomps);

  for (i = 0; i < ncomps; i++) {
    comps[i].prec = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].sgnd = 0;
    comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
    comps[i].dx =
        GST_VIDEO_FRAME_WIDTH (frame) / GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].dy =
        GST_VIDEO_FRAME_HEIGHT (frame) / GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
  }

  if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
    colorspace = OPJ_CLRSPC_SYCC;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
    colorspace = OPJ_CLRSPC_SRGB;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
    colorspace = OPJ_CLRSPC_GRAY;
  else
    g_return_val_if_reached (NULL);

  image = opj_image_create (ncomps, comps, colorspace);
  g_free (comps);

  image->x0 = image->y0 = 0;
  image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
  image->y1 = GST_VIDEO_FRAME_HEIGHT (frame);

  self->fill_image (image, frame);

  return image;
}
static void
gst_color_effects_transform_rgb (GstColorEffects * filter,
    GstVideoFrame * frame)
{
  gint i, j;
  gint width, height;
  gint pixel_stride, row_stride, row_wrap;
  guint32 r, g, b;
  guint32 luma;
  gint offsets[3];
  guint8 *data;

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  offsets[0] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 2);

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = row_stride - pixel_stride * width;

  /* transform */

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];
      if (filter->map_luma) {
        /* BT. 709 coefficients in B8 fixed point */
        /* 0.2126 R + 0.7152 G + 0.0722 B */
        luma = ((r << 8) * 54) + ((g << 8) * 183) + ((b << 8) * 19);
        luma >>= 16;            /* get integer part */
        luma *= 3;              /* times 3 to retrieve the correct pixel from
                                 * the lut */
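        /* fixed-point sanity check: r = g = b = 255 gives
         * 255 * 256 * (54 + 183 + 19) = 255 * 256 * 256, so luma >> 16
         * is 255 again; the coefficients sum to exactly 256 */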
        /* map luma to lookup table */
        /* src.luma |-> table[luma].rgb */
        data[offsets[0]] = filter->table[luma];
        data[offsets[1]] = filter->table[luma + 1];
        data[offsets[2]] = filter->table[luma + 2];
      } else {
        /* map each color component to the correspondent lut color */
        /* src.r |-> table[r].r */
        /* src.g |-> table[g].g */
        /* src.b |-> table[b].b */
        data[offsets[0]] = filter->table[r * 3];
        data[offsets[1]] = filter->table[g * 3 + 1];
        data[offsets[2]] = filter->table[b * 3 + 2];
      }
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
static void
gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  guint8 *in_data, *out_data;
  guint i, dx;
  gint width, height;
  gint in_stride;
  gint out_stride;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  in_data += vcrop->crop_top * in_stride;

  /* rounding down here so we end up at the start of a macro-pixel and not
   * in the middle of one */
  in_data += ROUND_DOWN_2 (vcrop->crop_left) *
      GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);

  /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
   * YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
  if ((vcrop->crop_left % 2) != 0) {
    for (i = 0; i < height; ++i) {
      gint j;

      memcpy (out_data, in_data, dx);

      /* move just the Y samples one pixel to the left, don't worry about
       * chroma shift */
      for (j = vcrop->macro_y_off; j < out_stride - 2; j += 2)
        out_data[j] = in_data[j + 2];

      in_data += in_stride;
      out_data += out_stride;
    }
  } else {
    for (i = 0; i < height; ++i) {
      memcpy (out_data, in_data, dx);
      in_data += in_stride;
      out_data += out_stride;
    }
  }
}
Example #15
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
        base[1] += rstride;
        base[2] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
static GstFlowReturn
gst_cairo_overlay_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstCairoOverlay *overlay = GST_CAIRO_OVERLAY (vfilter);
  cairo_surface_t *surface;
  cairo_t *cr;
  cairo_format_t format;

  switch (GST_VIDEO_FRAME_FORMAT (frame)) {
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
      format = CAIRO_FORMAT_ARGB32;
      break;
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
      format = CAIRO_FORMAT_RGB24;
      break;
    case GST_VIDEO_FORMAT_RGB16:
      format = CAIRO_FORMAT_RGB16_565;
      break;
    default:
    {
      GST_WARNING ("No matching cairo format for %s",
          gst_video_format_to_string (GST_VIDEO_FRAME_FORMAT (frame)));
      return GST_FLOW_ERROR;
    }
  }

  surface =
      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (frame,
          0), format, GST_VIDEO_FRAME_WIDTH (frame),
      GST_VIDEO_FRAME_HEIGHT (frame), GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0));

  if (G_UNLIKELY (!surface))
    return GST_FLOW_ERROR;

  cr = cairo_create (surface);
  if (G_UNLIKELY (!cr)) {
    cairo_surface_destroy (surface);
    return GST_FLOW_ERROR;
  }

  g_signal_emit (overlay, gst_cairo_overlay_signals[SIGNAL_DRAW], 0,
      cr, GST_BUFFER_PTS (frame->buffer), GST_BUFFER_DURATION (frame->buffer),
      NULL);

  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  return GST_FLOW_OK;
}
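
The element side above emits the "draw" signal once per mapped frame; the application supplies the actual drawing. A minimal sketch of the handler (names are illustrative; the signature is cairooverlay's stock "draw" signature):

static void
draw_cb (GstElement * overlay, cairo_t * cr, guint64 timestamp,
    guint64 duration, gpointer user_data)
{
  /* paint a translucent box on top of the video */
  cairo_set_source_rgba (cr, 0.9, 0.0, 0.1, 0.7);
  cairo_rectangle (cr, 10, 10, 100, 50);
  cairo_fill (cr);
}

static void
setup_overlay (GstElement * overlay)
{
  g_signal_connect (overlay, "draw", G_CALLBACK (draw_cb), NULL);
}
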
Example #17
static GstFlowReturn
gst_shagadelictv_transform_frame (GstVideoFilter * vfilter,
                                  GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
    GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
    guint32 *src, *dest;
    gint x, y;
    guint32 v;
    guint8 r, g, b;
    gint width, height;

    src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
    dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

    width = GST_VIDEO_FRAME_WIDTH (in_frame);
    height = GST_VIDEO_FRAME_HEIGHT (in_frame);

    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            v = *src++ | 0x1010100;
            v = (v - 0x707060) & 0x1010100;
            v -= v >> 8;
            /* Try another Babe!
             * v = *src++;
             * *dest++ = v & ((r<<16)|(g<<8)|b);
             */
            r = ((gint8) (filter->ripple[(filter->ry + y) * width * 2 + filter->rx +
                                         x] + filter->phase * 2)) >> 7;
            g = ((gint8) (filter->spiral[y * width + x] + filter->phase * 3)) >> 7;
            b = ((gint8) (filter->ripple[(filter->by + y) * width * 2 + filter->bx +
                                         x] - filter->phase)) >> 7;
            *dest++ = v & ((r << 16) | (g << 8) | b);
        }
    }

    filter->phase -= 8;
    if ((filter->rx + filter->rvx) < 0 || (filter->rx + filter->rvx) >= width)
        filter->rvx = -filter->rvx;
    if ((filter->ry + filter->rvy) < 0 || (filter->ry + filter->rvy) >= height)
        filter->rvy = -filter->rvy;
    if ((filter->bx + filter->bvx) < 0 || (filter->bx + filter->bvx) >= width)
        filter->bvx = -filter->bvx;
    if ((filter->by + filter->bvy) < 0 || (filter->by + filter->bvy) >= height)
        filter->bvy = -filter->bvy;
    filter->rx += filter->rvx;
    filter->ry += filter->rvy;
    filter->bx += filter->bvx;
    filter->by += filter->bvy;

    return GST_FLOW_OK;
}
static void
gst_video_crop_transform_planar (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  gint width, height;
  guint8 *y_out, *u_out, *v_out;
  guint8 *y_in, *u_in, *v_in;
  guint i, dx;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  /* Y plane */
  y_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  y_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  y_in +=
      (vcrop->crop_top * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame,
          0)) + vcrop->crop_left;
  dx = width;

  for (i = 0; i < height; ++i) {
    memcpy (y_out, y_in, dx);
    y_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
    y_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  }

  /* U + V planes */
  u_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
  u_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1);

  u_in += (vcrop->crop_top / 2) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
  u_in += vcrop->crop_left / 2;

  v_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2);
  v_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 2);

  v_in += (vcrop->crop_top / 2) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 2);
  v_in += vcrop->crop_left / 2;

  dx = GST_ROUND_UP_2 (width) / 2;

  for (i = 0; i < GST_ROUND_UP_2 (height) / 2; ++i) {
    memcpy (u_out, u_in, dx);
    memcpy (v_out, v_in, dx);
    u_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
    u_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1);
    v_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 2);
    v_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 2);
  }
}
static GstFlowReturn
gst_video_median_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstVideoMedian *median = GST_VIDEO_MEDIAN (filter);

  if (median->filtersize == 5) {
    median_5 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0),
        GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0),
        GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0),
        GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0),
        GST_VIDEO_FRAME_WIDTH (in_frame), GST_VIDEO_FRAME_HEIGHT (in_frame));

    if (median->lum_only) {
      gst_video_frame_copy_plane (out_frame, in_frame, 1);
      gst_video_frame_copy_plane (out_frame, in_frame, 2);
    } else {
      median_5 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1),
          GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1),
          GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1),
          GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1),
          GST_VIDEO_FRAME_WIDTH (in_frame) / 2,
          GST_VIDEO_FRAME_HEIGHT (in_frame) / 2);
      median_5 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 2),
          GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 2),
          GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2),
          GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 2),
          GST_VIDEO_FRAME_WIDTH (in_frame) / 2,
          GST_VIDEO_FRAME_HEIGHT (in_frame) / 2);
    }
  } else {
    median_9 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0),
        GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0),
        GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0),
        GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0),
        GST_VIDEO_FRAME_WIDTH (in_frame), GST_VIDEO_FRAME_HEIGHT (in_frame));

    if (median->lum_only) {
      gst_video_frame_copy_plane (out_frame, in_frame, 1);
      gst_video_frame_copy_plane (out_frame, in_frame, 2);
    } else {
      median_9 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1),
          GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1),
          GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1),
          GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1),
          GST_VIDEO_FRAME_WIDTH (in_frame) / 2,
          GST_VIDEO_FRAME_HEIGHT (in_frame) / 2);
      median_9 (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 2),
          GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 2),
          GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2),
          GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 2),
          GST_VIDEO_FRAME_WIDTH (in_frame) / 2,
          GST_VIDEO_FRAME_HEIGHT (in_frame) / 2);
    }
  }

  return GST_FLOW_OK;
}
Example #20
static void
fill_frame_planar16_4_generic (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h;
  guint16 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint dx[4], dy[4], shift[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  data_in[0] = image->comps[0].data;
  data_in[1] = image->comps[1].data;
  data_in[2] = image->comps[2].data;
  data_in[3] = image->comps[3].data;

  dx[0] = image->comps[0].dx;
  dx[1] = image->comps[1].dx;
  dx[2] = image->comps[2].dx;
  dx[3] = image->comps[3].dx;

  dy[0] = image->comps[0].dy;
  dy[1] = image->comps[1].dy;
  dy[2] = image->comps[2].dy;
  dy[3] = image->comps[3].dy;

  shift[0] = 16 - image->comps[0].prec;
  shift[1] = 16 - image->comps[1].prec;
  shift[2] = 16 - image->comps[2].prec;
  shift[3] = 16 - image->comps[3].prec;

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      tmp[0] = data_in[3][((y / dy[3]) * w + x) / dx[3]] << shift[3];
      tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0];
      tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1];
      tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2];
      tmp += 4;
    }
    data_out += dstride;
  }
}
Example #21
static GstFlowReturn
gst_streaktv_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstStreakTV *filter = GST_STREAKTV (vfilter);
  guint32 *src, *dest;
  gint i, cf;
  gint video_area, width, height;
  guint32 **planetable = filter->planetable;
  gint plane = filter->plane;
  guint stride_mask, stride_shift, stride;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  video_area = width * height;

  GST_OBJECT_LOCK (filter);
  if (filter->feedback) {
    stride_mask = 0xfcfcfcfc;
    stride = 8;
    stride_shift = 2;
  } else {
    stride_mask = 0xf8f8f8f8;
    stride = 4;
    stride_shift = 3;
  }

  for (i = 0; i < video_area; i++) {
    planetable[plane][i] = (src[i] & stride_mask) >> stride_shift;
  }

  cf = plane & (stride - 1);
  if (filter->feedback) {
    for (i = 0; i < video_area; i++) {
      dest[i] = planetable[cf][i]
          + planetable[cf + stride][i]
          + planetable[cf + stride * 2][i]
          + planetable[cf + stride * 3][i];
      planetable[plane][i] = (dest[i] & stride_mask) >> stride_shift;
    }
  } else {
    for (i = 0; i < video_area; i++) {
      dest[i] = planetable[cf][i]
          + planetable[cf + stride][i]
          + planetable[cf + stride * 2][i]
          + planetable[cf + stride * 3][i]
          + planetable[cf + stride * 4][i]
          + planetable[cf + stride * 5][i]
          + planetable[cf + stride * 6][i]
          + planetable[cf + stride * 7][i];
    }
  }

  /* PLANES is the element's plane-history depth (32 in streaktv) */
  plane++;
  filter->plane = plane & (PLANES - 1);
  GST_OBJECT_UNLOCK (filter);

  return GST_FLOW_OK;
}
Example #22
static void
gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte,
    const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
    gint border, gint pos)
{
  gint i, j;
  const guint32 *maskp;
  gint value;
  gint min, max;
  gint width, height;
  guint8 *in, *out;
  gint src_wrap, dest_wrap;

  if (border == 0)
    border++;

  min = pos - border;
  max = pos;
  GST_DEBUG_OBJECT (smpte, "pos %d, min %d, max %d, border %d", pos, min, max,
      border);

  maskp = mask->data;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
  src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2);
  dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);

  /* we basically copy the source to dest but we scale the alpha channel with
   * the mask */
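  /* the mask value is first mapped to a Q8 factor in [0, 256] (note that
   * max - min == border), which scales only the alpha byte; Y, U and V are
   * copied through unchanged */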
  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      value = *maskp++;
      *out++ = (*in++ * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
      *out++ = *in++;
      *out++ = *in++;
      *out++ = *in++;
    }
    in += src_wrap;
    out += dest_wrap;
  }
}
Example #23
/* Actual processing. */
static GstFlowReturn
gst_burn_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstBurn *filter = GST_BURN (vfilter);
  gint video_size, adjustment, width, height;
  guint32 *src, *dest;
  GstClockTime timestamp;
  gint64 stream_time;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  video_size = width * height;

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  GST_OBJECT_LOCK (filter);
  adjustment = filter->adjustment;
  GST_OBJECT_UNLOCK (filter);

  /*** Now the image processing work.... ***/
  orc_gaudi_burn (dest, src, adjustment, video_size);

  return GST_FLOW_OK;
}
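
The gst_object_sync_values() call above only has a visible effect when a control source is bound to the element. A minimal sketch of the application side, assuming the element exposes a controllable "adjustment" property:

static void
attach_adjustment_controller (GstElement * element)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();

  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (element),
      gst_direct_control_binding_new (GST_OBJECT (element), "adjustment", cs));

  /* values are normalized to [0, 1] for direct control bindings */
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0 * GST_SECOND, 0.1);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      5 * GST_SECOND, 0.9);
  gst_object_unref (cs);
}
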
Example #24
static void
fill_frame_planar8_3_generic (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h;
  guint8 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint dx[3], dy[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_in[0] = image->comps[0].data;
  data_in[1] = image->comps[1].data;
  data_in[2] = image->comps[2].data;

  dx[0] = image->comps[0].dx;
  dx[1] = image->comps[1].dx;
  dx[2] = image->comps[2].dx;

  dy[0] = image->comps[0].dy;
  dy[1] = image->comps[1].dy;
  dy[2] = image->comps[2].dy;
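
  /* dx/dy are the per-component subsampling factors: component n is read
   * at index ((y / dy[n]) * w + x) / dx[n], so subsampled chroma is
   * upsampled by simple sample repetition */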

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      tmp[0] = 0xff;
      tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]];
      tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]];
      tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]];
      tmp += 4;
    }
    data_out += dstride;
  }
}
Example #25
static GstFlowReturn
gst_mfc_dec_fill_outbuf (GstMFCDec * self, GstBuffer * outbuf,
    struct mfc_buffer *mfc_outbuf, GstVideoCodecState * state)
{
  GstFlowReturn ret = GST_FLOW_OK;
  const guint8 *mfc_outbuf_comps[3] = { NULL, };
  gint i, j, h, w, src_stride, dst_stride;
  guint8 *dst_, *src_;
  GstVideoFrame vframe;
  Fimc *fimc = self->fimc;
  gboolean zerocopy, has_cropping;

  memset (&vframe, 0, sizeof (vframe));

  zerocopy = TRUE;
  /* FIXME: Not 100% correct, we need the memory of each
   * plane to be contiguous at least */
  if (GST_VIDEO_INFO_N_PLANES (&state->info) > gst_buffer_n_memory (outbuf)) {
    zerocopy = FALSE;
  } else {
    gint n = gst_buffer_n_memory (outbuf);

    for (i = 0; i < n; i++) {
      GstMemory *mem = gst_buffer_peek_memory (outbuf, i);

      if (!GST_MEMORY_IS_PHYSICALLY_CONTIGUOUS (mem)) {
        zerocopy = FALSE;
        break;
      }
    }
  }

  has_cropping = self->has_cropping && (self->width != self->crop_width
      || self->height != self->crop_height);

  /* We only do cropping if we do zerocopy and downstream
   * supports cropping. For non-zerocopy we can do cropping
   * more efficient.
   * We can't do cropping ourself with zerocopy because
   * FIMC returns EFAULT when queueing the destination
   * buffers
   */
  if (zerocopy && has_cropping) {
    GstVideoCropMeta *crop;

    crop = gst_buffer_add_video_crop_meta (outbuf);
    crop->x = self->crop_left;
    crop->y = self->crop_top;
    crop->width = self->crop_width;
    crop->height = self->crop_height;
  }

  if (!gst_video_frame_map (&vframe, &state->info, outbuf, GST_MAP_WRITE))
    goto frame_map_error;

  mfc_buffer_get_output_data (mfc_outbuf, (void **) &mfc_outbuf_comps[0],
      (void **) &mfc_outbuf_comps[1]);

  if (zerocopy && (has_cropping || (self->width == self->crop_width
              && self->height == self->crop_height))) {
    void *dst[3];

    if (self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      fimc = self->fimc;

      if (self->format == GST_VIDEO_FORMAT_NV12) {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[2] = 0;
      } else {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 ((self->width + 1) / 2);
        self->dst_stride[2] = GST_ROUND_UP_4 ((self->width + 1) / 2);
      }

      if (has_cropping) {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, 0, 0, self->width,
                self->height) < 0)
          goto fimc_dst_error;
      } else {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, self->crop_left,
                self->crop_top, self->crop_width, self->crop_height) < 0)
          goto fimc_dst_error;
      }
      self->mmap = FALSE;

      if (fimc_request_dst_buffers (fimc) < 0)
        goto fimc_dst_requestbuffers_error;

      self->dst[0] = NULL;
      self->dst[1] = NULL;
      self->dst[2] = NULL;
    }

    dst[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    dst[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
    if (self->format == GST_VIDEO_FORMAT_NV12)
      dst[2] = NULL;
    else
      dst[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps, (void **) dst) < 0)
      goto fimc_convert_error;
  } else {
    if (!self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      self->dst_stride[0] = 0;
      self->dst_stride[1] = 0;
      self->dst_stride[2] = 0;
      self->mmap = TRUE;
      fimc = self->fimc;
    }

    if (!self->dst[0]) {
      if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
              self->height, self->dst_stride, self->crop_left,
              self->crop_top, self->crop_width, self->crop_height) < 0)
        goto fimc_dst_error;

      if (fimc_request_dst_buffers_mmap (fimc, self->dst, self->dst_stride) < 0)
        goto fimc_dst_requestbuffers_error;
    }

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps,
            (void **) self->dst) < 0)
      goto fimc_convert_error;

    switch (state->info.finfo->format) {
      case GST_VIDEO_FORMAT_RGBx:
        dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
        src_ = self->dst[0];
        src_stride = self->dst_stride[0];
        h = GST_VIDEO_FRAME_HEIGHT (&vframe);
        w = GST_VIDEO_FRAME_WIDTH (&vframe);
        dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
        for (i = 0; i < h; i++) {
          memcpy (dst_, src_, w);
          dst_ += dst_stride;
          src_ += src_stride;
        }
        break;
      case GST_VIDEO_FORMAT_I420:
      case GST_VIDEO_FORMAT_YV12:
        for (j = 0; j < 3; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j);
          dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      case GST_VIDEO_FORMAT_NV12:
        for (j = 0; j < 2; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j) * (j == 0 ? 1 : 2);
          dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      default:
        g_assert_not_reached ();
        break;
    }
  }

done:
  if (vframe.buffer)
    gst_video_frame_unmap (&vframe);

  return ret;

frame_map_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to map output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_create_error:
  {
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to set FIMC destination parameters"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_requestbuffers_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to request FIMC destination buffers"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_convert_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to convert via FIMC"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
Example #26
static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
    gint r_h, gint comp)
{
  guchar *y_rows[16], *u_rows[16], *v_rows[16];
  guchar **scanarray[3] = { y_rows, u_rows, v_rows };
  gint i, j, k;
  gint lines;
  guchar *base[3], *last[3];
  gint stride[3];
  gint width, height;

  GST_DEBUG_OBJECT (dec,
      "unadvantageous width or r_h, taking slow route involving memcpy");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));

  /* fill chroma components for grayscale */
  if (comp == 1) {
    GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
    for (i = 0; i < 16; i++) {
      /* memset (ptr, value, size): fill with the neutral chroma value */
      memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
      memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
    }
  }

  for (i = 0; i < height; i += r_v * DCTSIZE) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
        if (G_LIKELY (base[0] <= last[0])) {
          memcpy (base[0], y_rows[j], stride[0]);
          base[0] += stride[0];
        }
        if (r_v == 2) {
          if (G_LIKELY (base[0] <= last[0])) {
            memcpy (base[0], y_rows[j + 1], stride[0]);
            base[0] += stride[0];
          }
        }
        if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
          if (r_h == 2) {
            memcpy (base[1], u_rows[k], stride[1]);
            memcpy (base[2], v_rows[k], stride[2]);
          } else if (r_h == 1) {
            hresamplecpy1 (base[1], u_rows[k], stride[1]);
            hresamplecpy1 (base[2], v_rows[k], stride[2]);
          } else {
            /* FIXME: implement (at least we avoid crashing by doing nothing) */
          }
        }

        if (r_v == 2 || (k & 1) != 0) {
          base[1] += stride[1];
          base[2] += stride[2];
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #28
static void
gst_validate_ssim_save_out (GstValidateSsim * self, GstBuffer * buffer,
    const gchar * ref_file, const gchar * file, const gchar * outfolder)
{
  GstVideoFrame frame, converted;

  if (!g_file_test (outfolder, G_FILE_TEST_IS_DIR)) {
    if (g_mkdir_with_parents (outfolder, 0755) != 0) {

      GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
          "Could not create output directory %s", outfolder);
      return;
    }
  }

  if (self->priv->outconverter_info.converter == NULL ||
      self->priv->width != self->priv->outconverter_info.out_info.width ||
      self->priv->height != self->priv->outconverter_info.out_info.height) {

    if (self->priv->outconverter_info.converter)
      gst_video_converter_free (self->priv->outconverter_info.converter);

    gst_video_info_init (&self->priv->outconverter_info.in_info);
    gst_video_info_set_format (&self->priv->outconverter_info.in_info,
        GST_VIDEO_FORMAT_GRAY8, self->priv->width, self->priv->height);

    gst_video_info_init (&self->priv->outconverter_info.out_info);
    gst_video_info_set_format (&self->priv->outconverter_info.out_info,
        GST_VIDEO_FORMAT_RGBx, self->priv->width, self->priv->height);

    self->priv->outconverter_info.converter =
        gst_video_converter_new (&self->priv->outconverter_info.in_info,
        &self->priv->outconverter_info.out_info, NULL);
  }

  if (!gst_video_frame_map (&frame, &self->priv->outconverter_info.in_info,
          buffer, GST_MAP_READ)) {
    GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
        "Could not map output frame");

    return;
  }

  if (gst_validate_ssim_convert (self, &self->priv->outconverter_info,
          &frame, &converted)) {
    cairo_status_t status;
    cairo_surface_t *surface;
    gchar *bn1 = g_path_get_basename (ref_file);
    gchar *bn2 = g_path_get_basename (file);
    gchar *fname = g_strdup_printf ("%s.VS.%s.result.png", bn1, bn2);
    gchar *outfile = g_build_path (G_DIR_SEPARATOR_S, outfolder, fname, NULL);

    surface =
        cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA
        (&converted, 0), CAIRO_FORMAT_RGB24, GST_VIDEO_FRAME_WIDTH (&converted),
        GST_VIDEO_FRAME_HEIGHT (&converted),
        GST_VIDEO_FRAME_PLANE_STRIDE (&converted, 0));

    if ((status = cairo_surface_write_to_png (surface, outfile)) !=
        CAIRO_STATUS_SUCCESS) {
      GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
          "Could not save '%s', cairo status is '%s'", outfile,
          cairo_status_to_string (status));
    }

    cairo_surface_destroy (surface);
    gst_video_frame_unmap (&converted);
    g_free (bn1);
    g_free (bn2);
    g_free (fname);
    g_free (outfile);
  }

  /* unmap the input frame on both the success and failure paths */
  gst_video_frame_unmap (&frame);
}
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) tc_meta->tc.config.flags) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #30
static GstFlowReturn
gst_rsvg_decode_image (GstRsvgDec * rsvg, GstBuffer * buffer,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
  GstFlowReturn ret = GST_FLOW_OK;
  cairo_t *cr;
  cairo_surface_t *surface;
  RsvgHandle *handle;
  GError *error = NULL;
  RsvgDimensionData dimension;
  gdouble scalex, scaley;
  GstMapInfo minfo;
  GstVideoFrame vframe;
  GstVideoCodecState *output_state;

  GST_LOG_OBJECT (rsvg, "parsing svg");

  if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ)) {
    GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
    return GST_FLOW_ERROR;
  }
  handle = rsvg_handle_new_from_data (minfo.data, minfo.size, &error);
  if (!handle) {
    GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message);
    g_error_free (error);
    gst_buffer_unmap (buffer, &minfo);
    return GST_FLOW_ERROR;
  }

  rsvg_handle_get_dimensions (handle, &dimension);

  output_state = gst_video_decoder_get_output_state (decoder);
  if ((output_state == NULL)
      || GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width
      || GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {

    /* Create the output state */
    if (output_state)
      gst_video_codec_state_unref (output_state);
    output_state =
        gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
        dimension.width, dimension.height, rsvg->input_state);
  }

  ret = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (ret != GST_FLOW_OK) {
    g_object_unref (handle);
    gst_video_codec_state_unref (output_state);
    gst_buffer_unmap (buffer, &minfo);
    GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s",
        gst_flow_get_name (ret));
    return ret;
  }

  GST_LOG_OBJECT (rsvg, "render image at %d x %d",
      GST_VIDEO_INFO_HEIGHT (&output_state->info),
      GST_VIDEO_INFO_WIDTH (&output_state->info));


  if (!gst_video_frame_map (&vframe,
          &output_state->info, frame->output_buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
    g_object_unref (handle);
    gst_video_codec_state_unref (output_state);
    return GST_FLOW_ERROR;
  }
  surface =
      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (&vframe,
          0), CAIRO_FORMAT_ARGB32, GST_VIDEO_FRAME_WIDTH (&vframe),
      GST_VIDEO_FRAME_HEIGHT (&vframe), GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
          0));

  cr = cairo_create (surface);
  cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);
  cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0);
  cairo_paint (cr);
  cairo_set_operator (cr, CAIRO_OPERATOR_OVER);
  cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0);

  scalex = scaley = 1.0;
  if (GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width) {
    scalex =
        ((gdouble) GST_VIDEO_INFO_WIDTH (&output_state->info)) /
        ((gdouble) dimension.width);
  }
  if (GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {
    scaley =
        ((gdouble) GST_VIDEO_INFO_HEIGHT (&output_state->info)) /
        ((gdouble) dimension.height);
  }
  cairo_scale (cr, scalex, scaley);
  rsvg_handle_render_cairo (handle, cr);

  g_object_unref (handle);
  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  /* Now unpremultiply Cairo's ARGB to match GStreamer's */
  gst_rsvg_decode_unpremultiply (GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0),
      GST_VIDEO_FRAME_WIDTH (&vframe), GST_VIDEO_FRAME_HEIGHT (&vframe));

  gst_video_codec_state_unref (output_state);
  gst_buffer_unmap (buffer, &minfo);
  gst_video_frame_unmap (&vframe);

  return ret;
}