Example #1
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                                              PlanarYCbCrImage::Data *aData)
{
  NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
               "Non-YUV video frame formats not supported");
  NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
               "Unsupported number of components in video frame");

  aData->mPicX = aData->mPicY = 0;
  aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  aData->mStereoMode = StereoMode::MONO;

  aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
  aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
  aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
                               GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
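  // GST_VIDEO_FRAME_COMP_PSTRIDE is the distance in bytes between successive
  // samples of a component, so the number of bytes to skip after each sample
  // is PSTRIDE - 1 (0 for tightly packed planar formats).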
  aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;
  aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
  aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
                                  GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
  aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
  aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
  aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
  aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
Example #2
static void
gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  guint8 *in_data, *out_data;
  gint width, height;
  guint i, dx;
  gint in_stride, out_stride;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  in_data += vcrop->crop_top * in_stride;
  in_data += vcrop->crop_left * GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);

  for (i = 0; i < height; ++i) {
    memcpy (out_data, in_data, dx);
    in_data += in_stride;
    out_data += out_stride;
  }
}
Example #3
static void
gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y, stride;
  guint8 *ydata, *udata, *vdata;
  gint yoff, uoff, voff;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * stride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr += yoff;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
  uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
  voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
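  /* packed YUV keeps all components in plane 0, so the plane-0 row stride
   * computed above is reused for the chroma pointers in the loop below */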

  for (y = 0; y < height2; y++) {
    guint8 *uptr, *vptr;
    guint8 u1, v1;

    uptr = udata + y * stride;
    vptr = vdata + y * stride;

    for (x = 0; x < width2; x++) {
      u1 = *uptr;
      v1 = *vptr;

      *uptr = tableu[u1][v1];
      *vptr = tablev[u1][v1];

      uptr += uoff;
      vptr += voff;
    }
  }
}
Example #4
static void
gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  guint8 *in_data, *out_data;
  guint i, dx;
  gint width, height;
  gint in_stride;
  gint out_stride;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  in_data += vcrop->crop_top * in_stride;

  /* rounding down here so we end up at the start of a macro-pixel and not
   * in the middle of one */
  in_data += ROUND_DOWN_2 (vcrop->crop_left) *
      GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);

  /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
   * YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
  if ((vcrop->crop_left % 2) != 0) {
    for (i = 0; i < height; ++i) {
      gint j;

      memcpy (out_data, in_data, dx);

      /* move just the Y samples one pixel to the left, don't worry about
       * chroma shift */
      for (j = vcrop->macro_y_off; j < out_stride - 2; j += 2)
        out_data[j] = in_data[j + 2];

      in_data += in_stride;
      out_data += out_stride;
    }
  } else {
    for (i = 0; i < height; ++i) {
      memcpy (out_data, in_data, dx);
      in_data += in_stride;
      out_data += out_stride;
    }
  }
}
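ROUND_DOWN_2 and the macro_y_off field are used above but not defined here. A plausible minimal sketch of the rounding macro, assuming the usual GStreamer definition (round down to an even pixel so the copy starts on a 4:2:2 macro-pixel boundary):

#define ROUND_DOWN_2(n)  ((n) & (~1))

/* macro_y_off would be the byte offset of the first Y sample inside a
 * macro-pixel: 1 for UYVY (Y follows U), 0 for YUY2. With crop_left = 3,
 * ROUND_DOWN_2 (3) = 2, so the copy starts at macro-pixel [U2 Y2 V2 Y3]
 * and the odd-crop branch then shifts every Y sample one pixel left. */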
Example #5
/* Copy the frame data from the GstBuffer (from decoder)
 * to the picture obtained from downstream in VLC.
 * This function should be avoided as much
 * as possible, since it involves a complete frame copy. */
static void gst_CopyPicture( picture_t *p_pic, GstVideoFrame *p_frame )
{
    int i_plane, i_planes, i_line, i_dst_stride, i_src_stride;
    uint8_t *p_dst, *p_src;
    int i_w, i_h;

    i_planes = p_pic->i_planes;
    for( i_plane = 0; i_plane < i_planes; i_plane++ )
    {
        p_dst = p_pic->p[i_plane].p_pixels;
        p_src = GST_VIDEO_FRAME_PLANE_DATA( p_frame, i_plane );
        i_dst_stride = p_pic->p[i_plane].i_pitch;
        i_src_stride = GST_VIDEO_FRAME_PLANE_STRIDE( p_frame, i_plane );

        i_w = GST_VIDEO_FRAME_COMP_WIDTH( p_frame,
                i_plane ) * GST_VIDEO_FRAME_COMP_PSTRIDE( p_frame, i_plane );
        i_h = GST_VIDEO_FRAME_COMP_HEIGHT( p_frame, i_plane );

        for( i_line = 0;
                i_line < __MIN( p_pic->p[i_plane].i_lines, i_h );
                i_line++ )
        {
            memcpy( p_dst, p_src, i_w );
            p_src += i_src_stride;
            p_dst += i_dst_stride;
        }
    }
}
Example #6
static void
gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  gint pixel_stride;
  guint8 *data;
  gint offsets[3];
  gint r, g, b;
  gint y, u, v;
  gint u_tmp, v_tmp;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = stride - pixel_stride * width;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];

      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      y = CLAMP (y, 0, 255);
      u_tmp = CLAMP (u_tmp, 0, 255);
      v_tmp = CLAMP (v_tmp, 0, 255);

      y = tabley[y];
      u = tableu[u_tmp][v_tmp];
      v = tablev[u_tmp][v_tmp];

      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      data[offsets[0]] = CLAMP (r, 0, 255);
      data[offsets[1]] = CLAMP (g, 0, 255);
      data[offsets[2]] = CLAMP (b, 0, 255);
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
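APPLY_MATRIX and the cog_* matrices are defined elsewhere in the element. A plausible minimal sketch of the macro, assuming 8-bit fixed-point matrices with four coefficients per output row:

/* apply row o of the 4-column matrix m to (v1, v2, v3, 1), 8-bit fixed point */
#define APPLY_MATRIX(m, o, v1, v2, v3) \
  ((m[o * 4] * v1 + m[o * 4 + 1] * v2 + m[o * 4 + 2] * v3 + m[o * 4 + 3]) >> 8)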
Example #7
static void
gst_color_effects_transform_rgb (GstColorEffects * filter,
    GstVideoFrame * frame)
{
  gint i, j;
  gint width, height;
  gint pixel_stride, row_stride, row_wrap;
  guint32 r, g, b;
  guint32 luma;
  gint offsets[3];
  guint8 *data;

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  offsets[0] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 2);

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = row_stride - pixel_stride * width;

  /* transform */

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];
      if (filter->map_luma) {
        /* BT. 709 coefficients in B8 fixed point */
        /* 0.2126 R + 0.7152 G + 0.0722 B */
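        /* the coefficients sum to 256 (54 + 183 + 19), so full-scale white
         * (255, 255, 255) maps exactly to luma 255 after the >> 16 */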
        luma = ((r << 8) * 54) + ((g << 8) * 183) + ((b << 8) * 19);
        luma >>= 16;            /* get integer part */
        luma *= 3;              /* times 3 to retrieve the correct pixel from
                                 * the lut */
        /* map luma to lookup table */
        /* src.luma |-> table[luma].rgb */
        data[offsets[0]] = filter->table[luma];
        data[offsets[1]] = filter->table[luma + 1];
        data[offsets[2]] = filter->table[luma + 2];
      } else {
        /* map each color component to the correspondent lut color */
        /* src.r |-> table[r].r */
        /* src.g |-> table[g].g */
        /* src.b |-> table[b].b */
        data[offsets[0]] = filter->table[r * 3];
        data[offsets[1]] = filter->table[g * 3 + 1];
        data[offsets[2]] = filter->table[b * 3 + 2];
      }
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
Example #8
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
        base[1] += rstride;
        base[2] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
Example #9
static void
gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  gint pixel_stride;
  const guint8 *table = gamma->gamma_table;
  gint offsets[3];
  gint r, g, b;
  gint y, u, v;
  guint8 *data;

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);

  offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = stride - pixel_stride * width;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];

      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      y = table[CLAMP (y, 0, 255)];
      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      data[offsets[0]] = CLAMP (r, 0, 255);
      data[offsets[1]] = CLAMP (g, 0, 255);
      data[offsets[2]] = CLAMP (b, 0, 255);
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
Example #10
/**
 * gst_video_frame_copy_plane:
 * @dest: a #GstVideoFrame
 * @src: a #GstVideoFrame
 * @plane: a plane
 *
 * Copy the plane with index @plane from @src to @dest.
 *
 * Returns: TRUE if the contents could be copied.
 */
gboolean
gst_video_frame_copy_plane (GstVideoFrame * dest, const GstVideoFrame * src,
                            guint plane)
{
    const GstVideoInfo *sinfo;
    GstVideoInfo *dinfo;
    guint w, h, j;
    guint8 *sp, *dp;
    gint ss, ds;

    g_return_val_if_fail (dest != NULL, FALSE);
    g_return_val_if_fail (src != NULL, FALSE);

    sinfo = &src->info;
    dinfo = &dest->info;

    g_return_val_if_fail (dinfo->finfo->format == sinfo->finfo->format, FALSE);
    g_return_val_if_fail (dinfo->width == sinfo->width
                          && dinfo->height == sinfo->height, FALSE);
    g_return_val_if_fail (dinfo->finfo->n_planes > plane, FALSE);

    sp = src->data[plane];
    dp = dest->data[plane];

    ss = sinfo->stride[plane];
    ds = dinfo->stride[plane];

    /* FIXME. assumes subsampling of component N is the same as plane N, which is
     * currently true for all formats we have but it might not be in the future. */
    w = GST_VIDEO_FRAME_COMP_WIDTH (dest, plane) *
        GST_VIDEO_FRAME_COMP_PSTRIDE (dest, plane);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (dest, plane);

    GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy plane %d, w:%d h:%d ", plane, w, h);

    for (j = 0; j < h; j++) {
        memcpy (dp, sp, w);
        dp += ds;
        sp += ss;
    }
    return TRUE;
}
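A minimal usage sketch (the wrapper name copy_all_planes is hypothetical), assuming both buffers already hold frames matching the same GstVideoInfo:

static gboolean
copy_all_planes (GstVideoInfo * info, GstBuffer * src_buf, GstBuffer * dest_buf)
{
    GstVideoFrame src, dest;
    guint p;
    gboolean ok = TRUE;

    if (!gst_video_frame_map (&src, info, src_buf, GST_MAP_READ))
        return FALSE;
    if (!gst_video_frame_map (&dest, info, dest_buf, GST_MAP_WRITE)) {
        gst_video_frame_unmap (&src);
        return FALSE;
    }

    /* copy every plane; the helper above checks format and size per call */
    for (p = 0; p < GST_VIDEO_FRAME_N_PLANES (&src); p++)
        ok = gst_video_frame_copy_plane (&dest, &src, p) && ok;

    gst_video_frame_unmap (&dest);
    gst_video_frame_unmap (&src);
    return ok;
}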
Example #11
static void
gst_vp9_dec_image_to_buffer (GstVP9Dec * dec, const vpx_image_t * img,
    GstBuffer * buffer)
{
  int deststride, srcstride, height, width, line, comp;
  guint8 *dest, *src;
  GstVideoFrame frame;
  GstVideoInfo *info = &dec->output_state->info;

  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Could not map video buffer");
    return;
  }

  for (comp = 0; comp < 3; comp++) {
    dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
    src = img->planes[comp];
    width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp)
        * GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, comp);
    height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
    deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
    srcstride = img->stride[comp];

    if (srcstride == deststride) {
      GST_TRACE_OBJECT (dec, "Stride matches. Comp %d: %d, copying full plane",
          comp, srcstride);
      memcpy (dest, src, srcstride * height);
    } else {
      GST_TRACE_OBJECT (dec, "Stride mismatch. Comp %d: %d != %d, copying "
          "line by line.", comp, srcstride, deststride);
      for (line = 0; line < height; line++) {
        memcpy (dest, src, width);
        dest += deststride;
        src += srcstride;
      }
    }
  }

  gst_video_frame_unmap (&frame);
}
Example #12
static void
gst_gamma_packed_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  gint pixel_stride;
  const guint8 *table = gamma->gamma_table;
  guint8 *data;

  data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = stride - pixel_stride * width;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      *data = table[*data];
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
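In a packed format such as YUY2 the Y samples in plane 0 sit two bytes apart, so pixel_stride is 2; a worked example of the row_wrap arithmetic above, with hypothetical numbers:

/* Hypothetical YUY2 frame, 320 pixels wide with a padded plane stride of 768:
 *   pixel_stride = 2, width = 320
 *   row_wrap = 768 - 2 * 320 = 128
 * so after each row the pointer skips 128 bytes of padding to land on the
 * start of the next row. */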
Example #13
static GstFlowReturn
gst_video_mark_yuv (GstSimpleVideoMark * simplevideomark, GstVideoFrame * frame)
{
  gint i, pw, ph, row_stride, pixel_stride;
  gint width, height, offset_calc, x, y;
  guint8 *d;
  guint64 pattern_shift;
  guint8 color;
  gint total_pattern;

  width = frame->info.width;
  height = frame->info.height;

  pw = simplevideomark->pattern_width;
  ph = simplevideomark->pattern_height;
  row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  offset_calc =
      row_stride * (height - ph - simplevideomark->bottom_offset) +
      pixel_stride * simplevideomark->left_offset;
  x = simplevideomark->left_offset;
  y = height - ph - simplevideomark->bottom_offset;

  total_pattern =
      simplevideomark->pattern_count + simplevideomark->pattern_data_count;
  /* If x and y offset values are outside the video, no need to draw */
  if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0
      || y > height) {
    GST_ERROR_OBJECT (simplevideomark,
        "simplevideomark pattern is outside the video. Not drawing.");
    return GST_FLOW_OK;
  }

  /* Offset calculation less than 0, then reset to 0 */
  if (offset_calc < 0)
    offset_calc = 0;
  /* Y position of mark is negative or pattern exceeds the video height,
     then recalculate pattern height for partial display */
  if (y < 0)
    ph += y;
  else if ((y + ph) > height)
    ph = height - y;
  /* If pattern height is less than 0, need not draw anything */
  if (ph < 0)
    return GST_FLOW_OK;

  /* move to start of bottom left */
  d += offset_calc;

  /* draw the bottom left pixels */
  for (i = 0; i < simplevideomark->pattern_count; i++) {
    gint draw_pw;

    if (i & 1)
      /* odd pixels must be white */
      color = 255;
    else
      color = 0;

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* draw box of width * height */
    gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,
        pixel_stride, color);

    /* move to i-th pattern */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)
      return GST_FLOW_OK;
  }

  pattern_shift =
      G_GUINT64_CONSTANT (1) << (simplevideomark->pattern_data_count - 1);

  /* get the data of the pattern */
  for (i = 0; i < simplevideomark->pattern_data_count; i++) {
    gint draw_pw;
    if (simplevideomark->pattern_data & pattern_shift)
      color = 255;
    else
      color = 0;

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,
        pixel_stride, color);

    pattern_shift >>= 1;

    /* move to i-th pattern data */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (simplevideomark->pattern_data_count - i - 1))) < 0
        || x >= width)
      return GST_FLOW_OK;
  }

  return GST_FLOW_OK;
}
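calculate_pw and gst_video_mark_draw_box are helpers from the same element and are not shown above. A plausible minimal sketch of both, assuming calculate_pw clamps the pattern width to the visible part of the frame and draw_box fills a solid box:

/* clamp pattern width when the box starts left of or runs past the frame */
static gint
calculate_pw (gint pw, gint x, gint width)
{
  if (x < 0)
    pw += x;
  else if ((x + pw) > width)
    pw = width - x;
  return pw;
}

/* fill a width x height box with an 8-bit value, one component sample
 * per pixel_stride bytes, one row per row_stride bytes */
static void
gst_video_mark_draw_box (GstSimpleVideoMark * simplevideomark, guint8 * data,
    gint width, gint height, gint row_stride, gint pixel_stride, guint8 color)
{
  gint i, j;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++)
      data[pixel_stride * j] = color;
    data += row_stride;
  }
}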
Example #14
static GstFlowReturn
gst_compositor_aggregate_frames (GstVideoAggregator * vagg, GstBuffer * outbuf)
{
  GList *l;
  GstCompositor *self = GST_COMPOSITOR (vagg);
  BlendFunction composite;
  GstVideoFrame out_frame, *outframe;

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, GST_MAP_WRITE)) {
    return GST_FLOW_ERROR;
  }

  outframe = &out_frame;
  /* default to blending */
  composite = self->blend;
  switch (self->background) {
    case COMPOSITOR_BACKGROUND_CHECKER:
      self->fill_checker (outframe);
      break;
    case COMPOSITOR_BACKGROUND_BLACK:
      self->fill_color (outframe, 16, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_WHITE:
      self->fill_color (outframe, 240, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_TRANSPARENT:
    {
      guint i, plane, num_planes, height;

      num_planes = GST_VIDEO_FRAME_N_PLANES (outframe);
      for (plane = 0; plane < num_planes; ++plane) {
        guint8 *pdata;
        gsize rowsize, plane_stride;

        pdata = GST_VIDEO_FRAME_PLANE_DATA (outframe, plane);
        plane_stride = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, plane);
        rowsize = GST_VIDEO_FRAME_COMP_WIDTH (outframe, plane)
            * GST_VIDEO_FRAME_COMP_PSTRIDE (outframe, plane);
        height = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, plane);
        for (i = 0; i < height; ++i) {
          memset (pdata, 0, rowsize);
          pdata += plane_stride;
        }
      }

      /* use overlay to keep background transparent */
      composite = self->overlay;
      break;
    }
  }

  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstCompositorPad *compo_pad = GST_COMPOSITOR_PAD (pad);

    if (pad->aggregated_frame != NULL) {
      composite (pad->aggregated_frame, compo_pad->xpos, compo_pad->ypos,
          compo_pad->alpha, outframe);
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  gst_video_frame_unmap (outframe);

  return GST_FLOW_OK;
}
Example #15
static void
gst_video_detect_yuv (GstSimpleVideoMarkDetect * simplevideomarkdetect,
    GstVideoFrame * frame)
{
  gdouble brightness;
  gint i, pw, ph, row_stride, pixel_stride;
  gint width, height, offset_calc, x, y;
  guint8 *d;
  guint64 pattern_data;
  gint total_pattern;

  width = frame->info.width;
  height = frame->info.height;

  pw = simplevideomarkdetect->pattern_width;
  ph = simplevideomarkdetect->pattern_height;
  row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  /* move to start of bottom left, adjust for offsets */
  offset_calc =
      row_stride * (height - ph - simplevideomarkdetect->bottom_offset) +
      pixel_stride * simplevideomarkdetect->left_offset;
  x = simplevideomarkdetect->left_offset;
  y = height - ph - simplevideomarkdetect->bottom_offset;

  total_pattern =
      simplevideomarkdetect->pattern_count +
      simplevideomarkdetect->pattern_data_count;
  /* If x and y offset values are outside the video, no need to analyze */
  if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0
      || y > height) {
    GST_ERROR_OBJECT (simplevideomarkdetect,
        "simplevideomarkdetect pattern is outside the video. Not Analyzing.");
    return;
  }

  /* Offset calculation less than 0, then reset to 0 */
  if (offset_calc < 0)
    offset_calc = 0;
  /* Y position of mark is negative or pattern exceeds the video height,
     then recalculate pattern height for partial display */
  if (y < 0)
    ph += y;
  else if ((y + ph) > height)
    ph = height - y;
  /* If pattern height is less than 0, need not analyze anything */
  if (ph < 0)
    return;

  /* move to start of bottom left */
  d += offset_calc;

  /* analyze the bottom left pixels */
  for (i = 0; i < simplevideomarkdetect->pattern_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);

    GST_DEBUG_OBJECT (simplevideomarkdetect, "brightness %f", brightness);

    if (i & 1) {
      /* odd pixels must be white, all pixels darker than the center +
       * sensitivity are considered wrong. */
      if (brightness <
          (simplevideomarkdetect->pattern_center +
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    } else {
      /* even pixels must be black, pixels lighter than the center - sensitivity
       * are considered wrong. */
      if (brightness >
          (simplevideomarkdetect->pattern_center -
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    }

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)
      break;
  }
  GST_DEBUG_OBJECT (simplevideomarkdetect, "found pattern");

  pattern_data = 0;

  /* get the data of the pattern */
  for (i = 0; i < simplevideomarkdetect->pattern_data_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);
    /* update pattern, we just use the center to decide between black and white. */
    pattern_data <<= 1;
    if (brightness > simplevideomarkdetect->pattern_center)
      pattern_data |= 1;

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern data */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (simplevideomarkdetect->pattern_data_count - i - 1))) < 0
        || x >= width)
      break;
  }

  GST_DEBUG_OBJECT (simplevideomarkdetect, "have data %" G_GUINT64_FORMAT,
      pattern_data);

  simplevideomarkdetect->in_pattern = TRUE;
  gst_video_detect_post_message (simplevideomarkdetect, frame->buffer,
      pattern_data);

  return;

no_pattern:
  {
    GST_DEBUG_OBJECT (simplevideomarkdetect, "no pattern found");
    if (simplevideomarkdetect->in_pattern) {
      simplevideomarkdetect->in_pattern = FALSE;
      gst_video_detect_post_message (simplevideomarkdetect, frame->buffer, 0);
    }
    return;
  }
}
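gst_video_detect_calc_brightness is likewise not shown. A minimal sketch, assuming it returns the mean sample value of the box normalized to the 0.0 to 1.0 range that pattern_center and pattern_sensitivity use:

static gdouble
gst_video_detect_calc_brightness (GstSimpleVideoMarkDetect *
    simplevideomarkdetect, guint8 * data, gint width, gint height,
    gint row_stride, gint pixel_stride)
{
  gint i, j;
  guint64 sum = 0;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++)
      sum += data[pixel_stride * j];
    data += row_stride;
  }
  /* average sample scaled into [0, 1] */
  return sum / (255.0 * width * height);
}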
Example #16
static GstFlowReturn
gst_gdk_pixbuf_dec_flush (GstGdkPixbufDec * filter)
{
  GstBuffer *outbuf;
  GdkPixbuf *pixbuf;
  int y;
  guint8 *out_pix;
  guint8 *in_pix;
  int in_rowstride, out_rowstride;
  GstFlowReturn ret;
  GstCaps *caps = NULL;
  gint width, height;
  gint n_channels;
  GstVideoFrame frame;

  pixbuf = gdk_pixbuf_loader_get_pixbuf (filter->pixbuf_loader);
  if (pixbuf == NULL)
    goto no_pixbuf;

  width = gdk_pixbuf_get_width (pixbuf);
  height = gdk_pixbuf_get_height (pixbuf);

  if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    GstVideoInfo info;
    GstVideoFormat fmt;

    GST_DEBUG ("Set size to %dx%d", width, height);

    n_channels = gdk_pixbuf_get_n_channels (pixbuf);
    switch (n_channels) {
      case 3:
        fmt = GST_VIDEO_FORMAT_RGB;
        break;
      case 4:
        fmt = GST_VIDEO_FORMAT_RGBA;
        break;
      default:
        goto channels_not_supported;
    }


    gst_video_info_init (&info);
    gst_video_info_set_format (&info, fmt, width, height);
    info.fps_n = filter->in_fps_n;
    info.fps_d = filter->in_fps_d;
    caps = gst_video_info_to_caps (&info);

    filter->info = info;

    gst_pad_set_caps (filter->srcpad, caps);
    gst_caps_unref (caps);

    gst_gdk_pixbuf_dec_setup_pool (filter, &info);
  }

  ret = gst_buffer_pool_acquire_buffer (filter->pool, &outbuf, NULL);
  if (ret != GST_FLOW_OK)
    goto no_buffer;

  GST_BUFFER_TIMESTAMP (outbuf) = filter->last_timestamp;
  GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;

  in_pix = gdk_pixbuf_get_pixels (pixbuf);
  in_rowstride = gdk_pixbuf_get_rowstride (pixbuf);

  gst_video_frame_map (&frame, &filter->info, outbuf, GST_MAP_WRITE);
  out_pix = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  out_rowstride = GST_VIDEO_FRAME_PLANE_STRIDE (&frame, 0);

  for (y = 0; y < height; y++) {
    memcpy (out_pix, in_pix, width * GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, 0));
    in_pix += in_rowstride;
    out_pix += out_rowstride;
  }

  gst_video_frame_unmap (&frame);

  GST_DEBUG ("pushing... %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (outbuf));
  ret = gst_pad_push (filter->srcpad, outbuf);

  if (ret != GST_FLOW_OK)
    GST_DEBUG_OBJECT (filter, "flow: %s", gst_flow_get_name (ret));

  return ret;

  /* ERRORS */
no_pixbuf:
  {
    GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
        ("error getting pixbuf"));
    return GST_FLOW_ERROR;
  }
channels_not_supported:
  {
    GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
        ("%d channels not supported", n_channels));
    return GST_FLOW_ERROR;
  }
no_buffer:
  {
    GST_DEBUG ("Failed to create outbuffer - %s", gst_flow_get_name (ret));
    return ret;
  }
}
Example #17
static int
gst_libde265_dec_get_buffer (de265_decoder_context * ctx,
    struct de265_image_spec *spec, struct de265_image *img, void *userdata)
{
  GstVideoDecoder *base = (GstVideoDecoder *) userdata;
  GstLibde265Dec *dec = GST_LIBDE265_DEC (base);
  GstVideoCodecFrame *frame = NULL;
  int i;
  int width = spec->width;
  int height = spec->height;
  GstFlowReturn ret;
  struct GstLibde265FrameRef *ref;
  GstVideoInfo *info;
  int frame_number;

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (G_UNLIKELY (frame_number == -1)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Frame has no number assigned!");
    goto fallback;
  }

  frame = gst_video_decoder_get_frame (base, frame_number);
  if (G_UNLIKELY (frame == NULL)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Couldn't get codec frame!");
    goto fallback;
  }

  if (width % spec->alignment) {
    width += spec->alignment - (width % spec->alignment);
  }
  if (width != spec->visible_width || height != spec->visible_height) {
    /* clipping not supported for now */
    goto fallback;
  }

  ret = _gst_libde265_image_available (base, width, height);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    goto fallback;
  }

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec), frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output buffer");
    goto fallback;
  }

  ref = (struct GstLibde265FrameRef *) g_malloc0 (sizeof (*ref));
  g_assert (ref != NULL);
  ref->decoder = base;
  ref->frame = frame;

  gst_buffer_replace (&ref->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  info = &dec->output_state->info;
  if (!gst_video_frame_map (&ref->vframe, info, ref->buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map frame output buffer");
    goto error;
  }

  ref->mapped = TRUE;
  if (GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe,
          0) < width * GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0)) {
    GST_DEBUG_OBJECT (dec, "plane 0: pitch too small (%d/%d*%d)",
        GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, 0), width,
        GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0));
    goto error;
  }

  if (GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0) < height) {
    GST_DEBUG_OBJECT (dec, "plane 0: lines too few (%d/%d)",
        GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0), height);
    goto error;
  }

  for (i = 0; i < 3; i++) {
    uint8_t *data;
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, i);
    if (stride % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d: pitch not aligned (%d%%%d)",
          i, stride, spec->alignment);
      goto error;
    }

    data = GST_VIDEO_FRAME_PLANE_DATA (&ref->vframe, i);
    if ((uintptr_t) (data) % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d not aligned", i);
      goto error;
    }

    de265_set_image_plane (img, i, data, stride, ref);
  }
  return 1;

error:
  gst_libde265_dec_release_frame_ref (ref);
  frame = NULL;

fallback:
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }
  return de265_get_default_image_allocation_functions ()->get_buffer (ctx,
      spec, img, userdata);
}
Example #18
static gdouble
gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstCaps * caps1,
    GstBuffer * buf2, GstCaps * caps2)
{
  GstVideoInfo info1, info2;
  GstVideoFrame frame1, frame2;
  gint i, comps;
  gdouble cssim[4] = { 0.0, }, ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 };

  if (!caps1)
    goto invalid_input;

  if (!gst_video_info_from_caps (&info1, caps1))
    goto invalid_input;

  if (!caps2)
    goto invalid_input;

  if (!gst_video_info_from_caps (&info2, caps2))
    goto invalid_input;

  if (GST_VIDEO_INFO_FORMAT (&info1) != GST_VIDEO_INFO_FORMAT (&info2) ||
      GST_VIDEO_INFO_WIDTH (&info1) != GST_VIDEO_INFO_WIDTH (&info2) ||
      GST_VIDEO_INFO_HEIGHT (&info1) != GST_VIDEO_INFO_HEIGHT (&info2))
    return comp->threshold + 1;

  comps = GST_VIDEO_INFO_N_COMPONENTS (&info1);
  /* note that some formats are reported as both yuv and gray */
  for (i = 0; i < comps; ++i)
    c[i] = 1.0;
  /* increase luma weight if yuv */
  if (GST_VIDEO_INFO_IS_YUV (&info1) && (comps > 1))
    c[0] = comps - 1;
  for (i = 0; i < comps; ++i)
    c[i] /= (GST_VIDEO_INFO_IS_YUV (&info1) && (comps > 1)) ?
        2 * (comps - 1) : comps;

  gst_video_frame_map (&frame1, &info1, buf1, GST_MAP_READ);
  gst_video_frame_map (&frame2, &info2, buf2, GST_MAP_READ);

  for (i = 0; i < comps; i++) {
    gint cw, ch, step, stride;

    /* only support most common formats */
    if (GST_VIDEO_INFO_COMP_DEPTH (&info1, i) != 8)
      goto unsupported_input;
    cw = GST_VIDEO_FRAME_COMP_WIDTH (&frame1, i);
    ch = GST_VIDEO_FRAME_COMP_HEIGHT (&frame1, i);
    step = GST_VIDEO_FRAME_COMP_PSTRIDE (&frame1, i);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame1, i);

    GST_LOG_OBJECT (comp, "component %d", i);
    cssim[i] = gst_compare_ssim_component (comp,
        GST_VIDEO_FRAME_COMP_DATA (&frame1, i),
        GST_VIDEO_FRAME_COMP_DATA (&frame2, i), cw, ch, step, stride);
    GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]);
  }

  gst_video_frame_unmap (&frame1);
  gst_video_frame_unmap (&frame2);

#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < 4; i++) {
    GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]);
  }
#endif

  ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] + cssim[3] * c[3];

  return ssim;

  /* ERRORS */
invalid_input:
  {
    GST_ERROR_OBJECT (comp, "ssim method needs raw video input");
    return 0;
  }
unsupported_input:
  {
    GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT,
        caps1);
    return 0;
  }
}