Example #1
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                                              PlanarYCbCrImage::Data *aData)
{
  NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
               "Non-YUV video frame formats not supported");
  NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
               "Unsupported number of components in video frame");

  aData->mPicX = aData->mPicY = 0;
  aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  aData->mStereoMode = StereoMode::MONO;

  aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
  aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
  aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
                               GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
  aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;
  aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
  aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
                                  GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
  aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
  aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
  aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
  aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
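Example #1 fills a PlanarYCbCrImage::Data out-parameter from an already-mapped frame. A minimal usage sketch (hypothetical caller code, not from the Mozilla source; `info` and `buffer` stand in for the reader's negotiated video info and the decoded GstBuffer):

  GstVideoFrame frame;
  PlanarYCbCrImage::Data data;
  if (gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ)) {
    ImageDataFromVideoFrame(&frame, &data);  // fill plane pointers/strides
    // ... hand data to the image layer while the mapping is alive ...
    gst_video_frame_unmap(&frame);
  }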
Example #2
static void
reconstruct (GstIvtc * ivtc, GstVideoFrame * dest_frame, int i1, int i2)
{
    GstVideoFrame *top, *bottom;
    int width, height;
    int j, k;

    g_return_if_fail (i1 >= 0 && i1 < ivtc->n_fields);
    g_return_if_fail (i2 >= 0 && i2 < ivtc->n_fields);

    if (ivtc->fields[i1].parity == TOP_FIELD) {
        top = &ivtc->fields[i1].frame;
        bottom = &ivtc->fields[i2].frame;
    } else {
        bottom = &ivtc->fields[i1].frame;
        top = &ivtc->fields[i2].frame;
    }

    for (k = 0; k < 3; k++) {
        height = GST_VIDEO_FRAME_COMP_HEIGHT (top, k);
        width = GST_VIDEO_FRAME_COMP_WIDTH (top, k);
        for (j = 0; j < height; j++) {
            guint8 *dest = GET_LINE (dest_frame, k, j);
            guint8 *src = GET_LINE_IL (top, bottom, k, j);

            memcpy (dest, src, width);
        }
    }

}
Example #3
static void
fill_frame_planar16_3 (GstVideoFrame * frame, opj_image_t * image)
{
  gint c, x, y, w, h;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift;

  for (c = 0; c < 3; c++) {
    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
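    /* COMP_STRIDE is in bytes; halve it so the guint16 pointer steps one row */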
    dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c) / 2;
    data_out = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
    data_in = image->comps[c].data;
    shift = 16 - image->comps[c].prec;

    for (y = 0; y < h; y++) {
      tmp = data_out;

      for (x = 0; x < w; x++) {
        *tmp = *data_in << shift;
        tmp++;
        data_in++;
      }
      data_out += dstride;
    }
  }
}
Example #4
static void
fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint c, x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  for (c = 0; c < 3; c++) {
    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
    data_in = GST_VIDEO_FRAME_COMP_DATA (frame, c);
    sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c);
    data_out = image->comps[c].data;

    for (y = 0; y < h; y++) {
      tmp = data_in;
      for (x = 0; x < w; x++) {
        *data_out = *tmp;
        data_out++;
        tmp++;
      }
      data_in += sstride;
    }
  }
}
Example #5
/* Copy the frame data from the GstBuffer (from decoder)
 * to the picture obtained from downstream in VLC.
 * This function should be avoided as much
 * as possible, since it involves a complete frame copy. */
static void gst_CopyPicture( picture_t *p_pic, GstVideoFrame *p_frame )
{
    int i_plane, i_planes, i_line, i_dst_stride, i_src_stride;
    uint8_t *p_dst, *p_src;
    int i_w, i_h;

    i_planes = p_pic->i_planes;
    for( i_plane = 0; i_plane < i_planes; i_plane++ )
    {
        p_dst = p_pic->p[i_plane].p_pixels;
        p_src = GST_VIDEO_FRAME_PLANE_DATA( p_frame, i_plane );
        i_dst_stride = p_pic->p[i_plane].i_pitch;
        i_src_stride = GST_VIDEO_FRAME_PLANE_STRIDE( p_frame, i_plane );

        i_w = GST_VIDEO_FRAME_COMP_WIDTH( p_frame,
                i_plane ) * GST_VIDEO_FRAME_COMP_PSTRIDE( p_frame, i_plane );
        i_h = GST_VIDEO_FRAME_COMP_HEIGHT( p_frame, i_plane );

        for( i_line = 0;
                i_line < __MIN( p_pic->p[i_plane].i_lines, i_h );
                i_line++ )
        {
            memcpy( p_dst, p_src, i_w );
            p_src += i_src_stride;
            p_dst += i_dst_stride;
        }
    }
}
Example #6
static void
gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y, stride;
  guint8 *ydata, *udata, *vdata;
  gint yoff, uoff, voff;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
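  /* packed YUV: Y, U and V share plane 0, so this single stride is reused below */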
  ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * stride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr += yoff;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
  uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
  voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);

  for (y = 0; y < height2; y++) {
    guint8 *uptr, *vptr;
    guint8 u1, v1;

    uptr = udata + y * stride;
    vptr = vdata + y * stride;

    for (x = 0; x < width2; x++) {
      u1 = *uptr;
      v1 = *vptr;

      *uptr = tableu[u1][v1];
      *vptr = tablev[u1][v1];

      uptr += uoff;
      vptr += voff;
    }
  }
}
Example #7
static void
gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint x, y;
  guint8 *ydata;
  guint8 *uvdata;
  gint ystride, uvstride;
  gint width, height;
  gint width2, height2;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;
  gint upos, vpos;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (y = 0; y < height; y++) {
    guint8 *yptr;

    yptr = ydata + y * ystride;
    for (x = 0; x < width; x++) {
      *yptr = tabley[*yptr];
      yptr++;
    }
  }

  width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);

  uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
  uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);

  upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
  vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;

  for (y = 0; y < height2; y++) {
    guint8 *uvptr;
    guint8 u1, v1;

    uvptr = uvdata + y * uvstride;

    for (x = 0; x < width2; x++) {
      u1 = uvptr[upos];
      v1 = uvptr[vpos];

      uvptr[upos] = tableu[u1][v1];
      uvptr[vpos] = tablev[u1][v1];
      uvptr += 2;
    }
  }
}
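The two ternaries above hard-code the NV12/NV21 interleave order. A possible alternative (an assumption, not taken from the videobalance element) is to read each component's byte position within the plane straight from the frame's format info:

  upos = GST_VIDEO_FRAME_COMP_POFFSET (frame, 1);  /* 0 for NV12, 1 for NV21 */
  vpos = GST_VIDEO_FRAME_COMP_POFFSET (frame, 2);  /* 1 for NV12, 0 for NV21 */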
Example #8
static void
copy_field (GstInterlace * interlace, GstBuffer * dest, GstBuffer * src,
    int field_index)
{
  GstVideoInfo *info = &interlace->info;
  gint i, j, n_planes;
  guint8 *d, *s;
  GstVideoFrame dframe, sframe;

  if (!gst_video_frame_map (&dframe, info, dest, GST_MAP_WRITE))
    goto dest_map_failed;

  if (!gst_video_frame_map (&sframe, info, src, GST_MAP_READ))
    goto src_map_failed;

  n_planes = GST_VIDEO_FRAME_N_PLANES (&dframe);

  for (i = 0; i < n_planes; i++) {
    gint cheight, cwidth;
    gint ss, ds;

    d = GST_VIDEO_FRAME_PLANE_DATA (&dframe, i);
    s = GST_VIDEO_FRAME_PLANE_DATA (&sframe, i);

    ds = GST_VIDEO_FRAME_PLANE_STRIDE (&dframe, i);
    ss = GST_VIDEO_FRAME_PLANE_STRIDE (&sframe, i);

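    /* advance to the first line of this field: 0 = top field, 1 = bottom */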
    d += field_index * ds;
    s += field_index * ss;

    cheight = GST_VIDEO_FRAME_COMP_HEIGHT (&dframe, i);
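    /* strides may be negative (flipped layout); copy the smaller magnitude in bytes */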
    cwidth = MIN (ABS (ss), ABS (ds));

    for (j = field_index; j < cheight; j += 2) {
      memcpy (d, s, cwidth);
      d += ds * 2;
      s += ss * 2;
    }
  }

  gst_video_frame_unmap (&dframe);
  gst_video_frame_unmap (&sframe);
  return;

dest_map_failed:
  {
    GST_ERROR_OBJECT (interlace, "failed to map dest");
    return;
  }
src_map_failed:
  {
    GST_ERROR_OBJECT (interlace, "failed to map src");
    gst_video_frame_unmap (&dframe);
    return;
  }
}
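A usage sketch for copy_field (hypothetical caller, not from gstinterlace.c): weaving one progressive output buffer from two field buffers:

  copy_field (interlace, dest, top_buf, 0);     /* writes lines 0, 2, 4, ... */
  copy_field (interlace, dest, bottom_buf, 1);  /* writes lines 1, 3, 5, ... */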
Example #9
static opj_image_t *
gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame)
{
  gint i, ncomps;
  opj_image_cmptparm_t *comps;
  OPJ_COLOR_SPACE colorspace;
  opj_image_t *image;

  ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
  comps = g_new0 (opj_image_cmptparm_t, ncomps);

  for (i = 0; i < ncomps; i++) {
    comps[i].prec = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].sgnd = 0;
    comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
    comps[i].dx =
        GST_VIDEO_FRAME_WIDTH (frame) / GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].dy =
        GST_VIDEO_FRAME_HEIGHT (frame) / GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
  }

  if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
    colorspace = OPJ_CLRSPC_SYCC;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
    colorspace = OPJ_CLRSPC_SRGB;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
    colorspace = OPJ_CLRSPC_GRAY;
  else
    g_return_val_if_reached (NULL);

  image = opj_image_create (ncomps, comps, colorspace);
  g_free (comps);

  image->x0 = image->y0 = 0;
  image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
  image->y1 = GST_VIDEO_FRAME_HEIGHT (frame);

  self->fill_image (image, frame);

  return image;
}
Example #10
static GstFlowReturn
gst_smooth_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
    GstVideoFrame * out_frame)
{
  GstSmooth *smooth;

  smooth = GST_SMOOTH (vfilter);

  if (!smooth->active) {
    gst_video_frame_copy (out_frame, in_frame);
    return GST_FLOW_OK;
  }

  smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 0),
      GST_VIDEO_FRAME_COMP_DATA (in_frame, 0),
      GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 0),
      GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 0),
      GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0),
      GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 0),
      smooth->tolerance, smooth->filtersize);
  if (!smooth->luma_only) {
    smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 1),
        GST_VIDEO_FRAME_COMP_DATA (in_frame, 1),
        GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 1),
        GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 1),
        GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1),
        GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 1),
        smooth->tolerance, smooth->filtersize);
    smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 2),
        GST_VIDEO_FRAME_COMP_DATA (in_frame, 2),
        GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 2),
        GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 2),
        GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 2),
        GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 2),
        smooth->tolerance, smooth->filtersize);
  } else {
    gst_video_frame_copy_plane (out_frame, in_frame, 1);
    gst_video_frame_copy_plane (out_frame, in_frame, 2);
  }

  return GST_FLOW_OK;
}
Example #11
/* Allocate buffer and copy image data into Y444 format */
static GstFlowReturn
daala_handle_image (GstDaalaDec * dec, od_img * img, GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
  gint width, height, stride;
  GstFlowReturn result;
  gint i, comp;
  guint8 *dest, *src;
  GstVideoFrame vframe;

  result = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (G_UNLIKELY (result != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
        gst_flow_get_name (result));
    return result;
  }

  /* if only libdaala would allow us to give it a destination frame */
  GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
      "doing unavoidable video frame copy");

  if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
              frame->output_buffer, GST_MAP_WRITE)))
    goto invalid_frame;

  for (comp = 0; comp < 3; comp++) {
    width = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, comp);
    height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, comp);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
    dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);

    src = img->planes[comp].data;

    for (i = 0; i < height; i++) {
      memcpy (dest, src, width);

      dest += stride;
      src += img->planes[comp].ystride;
    }
  }
  gst_video_frame_unmap (&vframe);

  return GST_FLOW_OK;
invalid_frame:
  {
    GST_DEBUG_OBJECT (dec, "could not map video frame");
    return GST_FLOW_ERROR;
  }
}
Example #12
static void
gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  gint pixel_stride;
  const guint8 *table = gamma->gamma_table;
  gint offsets[3];
  gint r, g, b;
  gint y, u, v;
  guint8 *data;

  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);

  offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  row_wrap = stride - pixel_stride * width;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      r = data[offsets[0]];
      g = data[offsets[1]];
      b = data[offsets[2]];

      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      y = table[CLAMP (y, 0, 255)];
      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      data[offsets[0]] = CLAMP (r, 0, 255);
      data[offsets[1]] = CLAMP (g, 0, 255);
      data[offsets[2]] = CLAMP (b, 0, 255);
      data += pixel_stride;
    }
    data += row_wrap;
  }
}
Example #13
/**
 * gst_video_frame_copy_plane:
 * @dest: a #GstVideoFrame
 * @src: a #GstVideoFrame
 * @plane: a plane
 *
 * Copy the plane with index @plane from @src to @dest.
 *
 * Returns: TRUE if the contents could be copied.
 */
gboolean
gst_video_frame_copy_plane (GstVideoFrame * dest, const GstVideoFrame * src,
                            guint plane)
{
    const GstVideoInfo *sinfo;
    GstVideoInfo *dinfo;
    guint w, h, j;
    guint8 *sp, *dp;
    gint ss, ds;

    g_return_val_if_fail (dest != NULL, FALSE);
    g_return_val_if_fail (src != NULL, FALSE);

    sinfo = &src->info;
    dinfo = &dest->info;

    g_return_val_if_fail (dinfo->finfo->format == sinfo->finfo->format, FALSE);
    g_return_val_if_fail (dinfo->width == sinfo->width
                          && dinfo->height == sinfo->height, FALSE);
    g_return_val_if_fail (dinfo->finfo->n_planes > plane, FALSE);

    sp = src->data[plane];
    dp = dest->data[plane];

    ss = sinfo->stride[plane];
    ds = dinfo->stride[plane];

    /* FIXME. assumes subsampling of component N is the same as plane N, which is
     * currently true for all formats we have but it might not be in the future. */
    w = GST_VIDEO_FRAME_COMP_WIDTH (dest,
                                    plane) * GST_VIDEO_FRAME_COMP_PSTRIDE (dest, plane);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (dest, plane);

    GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy plane %d, w:%d h:%d ", plane, w, h);

    for (j = 0; j < h; j++) {
        memcpy (dp, sp, w);
        dp += ds;
        sp += ss;
    }
    return TRUE;
}
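A minimal usage sketch (assuming dest and src are already-mapped, format-compatible frames):

    guint p;

    for (p = 0; p < GST_VIDEO_FRAME_N_PLANES (&dest); p++) {
        if (!gst_video_frame_copy_plane (&dest, &src, p))
            break;
    }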
Example #14
static void
gst_vp9_dec_image_to_buffer (GstVP9Dec * dec, const vpx_image_t * img,
    GstBuffer * buffer)
{
  int deststride, srcstride, height, width, line, comp;
  guint8 *dest, *src;
  GstVideoFrame frame;
  GstVideoInfo *info = &dec->output_state->info;

  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Could not map video buffer");
    return;
  }

  for (comp = 0; comp < 3; comp++) {
    dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
    src = img->planes[comp];
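    /* row width in bytes: pixels per row times bytes per pixel (pstride) */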
    width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp)
        * GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, comp);
    height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
    deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
    srcstride = img->stride[comp];

    if (srcstride == deststride) {
      GST_TRACE_OBJECT (dec, "Stride matches. Comp %d: %d, copying full plane",
          comp, srcstride);
      memcpy (dest, src, srcstride * height);
    } else {
      GST_TRACE_OBJECT (dec, "Stride mismatch. Comp %d: %d != %d, copying "
          "line by line.", comp, srcstride, deststride);
      for (line = 0; line < height; line++) {
        memcpy (dest, src, width);
        dest += deststride;
        src += srcstride;
      }
    }
  }

  gst_video_frame_unmap (&frame);
}
Example #15
static void
gst_gamma_planar_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
{
  gint i, j, height;
  gint width, stride, row_wrap;
  const guint8 *table = gamma->gamma_table;
  guint8 *data;

  data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  row_wrap = stride - width;

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      *data = table[*data];
      data++;
    }
    data += row_wrap;
  }
}
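gamma->gamma_table is a precomputed 256-entry LUT. A sketch of how such a table is commonly built (an assumption; the element's real property handler is not shown in this excerpt):

  /* requires <math.h>; maps each 8-bit value through pow(x, 1/gamma) */
  static void
  build_gamma_table (guint8 table[256], gdouble gamma)
  {
    gint i;
    gdouble exponent = 1.0 / gamma;

    for (i = 0; i < 256; i++)
      table[i] = (guint8) CLAMP (255.0 * pow (i / 255.0, exponent) + 0.5, 0.0, 255.0);
  }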
Example #16
static void
sink_handoff_cb_I420 (GstElement * object, GstBuffer * buffer, GstPad * pad,
    gpointer user_data)
{
  guint *sink_pos = (guint *) user_data;
  gboolean contains_text = (*sink_pos == 1 || *sink_pos == 2);
  guint c, i, j;
  gboolean all_red = TRUE;
  guint8 *comp;
  gint comp_stride, comp_width, comp_height;
  const guint8 color[] = { 81, 90, 240 };
  GstVideoInfo info;
  GstVideoFrame frame;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 640, 480);

  gst_video_frame_map (&frame, &info, buffer, GST_MAP_READ);

  for (c = 0; c < 3; c++) {
    comp = GST_VIDEO_FRAME_COMP_DATA (&frame, c);
    comp_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, c);
    comp_width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, c);
    comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, c);

    for (i = 0; i < comp_height; i++) {
      for (j = 0; j < comp_width; j++) {
        all_red = all_red && (comp[i * comp_stride + j] == color[c]);
      }
    }
  }
  gst_video_frame_unmap (&frame);

  fail_unless (contains_text != all_red,
      "Frame %d is incorrect (all red %d, contains text %d)", *sink_pos,
      all_red, contains_text);
  *sink_pos = *sink_pos + 1;
}
Example #17
static void
fill_image_planar16_1 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  data_in = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
  data_out = image->comps[0].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;
    for (x = 0; x < w; x++) {
      *data_out = *tmp;
      data_out++;
      tmp++;
    }
    data_in += sstride;
  }
}
Example #18
static void
deinterlace_frame_di_greedyh_planar (GstDeinterlaceMethod * method,
    const GstDeinterlaceField * history, guint history_count,
    GstVideoFrame * outframe, int cur_field_idx)
{
  GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (method);
  GstDeinterlaceMethodGreedyHClass *klass =
      GST_DEINTERLACE_METHOD_GREEDY_H_GET_CLASS (self);
  gint InfoIsOdd;
  gint RowStride;
  gint FieldHeight;
  gint Pitch;
  const guint8 *L1;             // ptr to Line1, of 3
  const guint8 *L2;             // ptr to Line2, the weave line
  const guint8 *L3;             // ptr to Line3
  const guint8 *L2P;            // ptr to prev Line2
  guint8 *Dest;
  gint i;
  ScanlineFunction scanline;

  if (cur_field_idx + 2 > history_count || cur_field_idx < 1) {
    GstDeinterlaceMethod *backup_method;

    backup_method = g_object_new (gst_deinterlace_method_linear_get_type (),
        NULL);

    gst_deinterlace_method_setup (backup_method, method->vinfo);
    gst_deinterlace_method_deinterlace_frame (backup_method,
        history, history_count, outframe, cur_field_idx);

    g_object_unref (backup_method);
    return;
  }

  cur_field_idx += 2;

  for (i = 0; i < 3; i++) {
    InfoIsOdd = (history[cur_field_idx - 1].flags == PICTURE_INTERLACED_BOTTOM);
    RowStride = GST_VIDEO_FRAME_COMP_STRIDE (outframe, i);
    FieldHeight = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, i) / 2;
    Pitch = RowStride * 2;

    if (i == 0)
      scanline = klass->scanline_planar_y;
    else
      scanline = klass->scanline_planar_uv;

    Dest = GST_VIDEO_FRAME_COMP_DATA (outframe, i);

    L1 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 2].frame, i);
    if (history[cur_field_idx - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 1].frame, i);
    if (history[cur_field_idx - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 3].frame, i);
    if (history[cur_field_idx - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    deinterlace_frame_di_greedyh_planar_plane (self, L1, L2, L3, L2P, Dest,
        RowStride, FieldHeight, Pitch, InfoIsOdd, scanline);
  }
}
Example #19
static void
reconstruct_single (GstIvtc * ivtc, GstVideoFrame * dest_frame, int i1)
{
    int j;
    int k;
    int height;
    int width;
    GstIvtcField *field = &ivtc->fields[i1];

    for (k = 0; k < 1; k++) {
        height = GST_VIDEO_FRAME_COMP_HEIGHT (dest_frame, k);
        width = GST_VIDEO_FRAME_COMP_WIDTH (dest_frame, k);
        for (j = 0; j < height; j++) {
            if ((j & 1) == field->parity) {
                memcpy (GET_LINE (dest_frame, k, j),
                        GET_LINE (&field->frame, k, j), width);
            } else {
                if (j == 0 || j == height - 1) {
                    memcpy (GET_LINE (dest_frame, k, j),
                            GET_LINE (&field->frame, k, (j ^ 1)), width);
                } else {
                    guint8 *dest = GET_LINE (dest_frame, k, j);
                    guint8 *line1 = GET_LINE (&field->frame, k, j - 1);
                    guint8 *line2 = GET_LINE (&field->frame, k, j + 1);
                    int i;

#define MARGIN 3
                    for (i = MARGIN; i < width - MARGIN; i++) {
                        int dx, dy;

                        dx = -line1[i - 1] - line2[i - 1] + line1[i + 1] + line2[i + 1];
                        dx *= 2;

                        dy = -line1[i - 1] - 2 * line1[i] - line1[i + 1]
                             + line2[i - 1] + 2 * line2[i] + line2[i + 1];
                        if (dy < 0) {
                            dy = -dy;
                            dx = -dx;
                        }

                        if (dx == 0 && dy == 0) {
                            dest[i] = (line1[i] + line2[i] + 1) >> 1;
                        } else if (dx < 0) {
                            if (dx < -2 * dy) {
                                dest[i] = reconstruct_line (line1, line2, i, 0, 0, 0, 16);
                            } else if (dx < -dy) {
                                dest[i] = reconstruct_line (line1, line2, i, 0, 0, 8, 8);
                            } else if (2 * dx < -dy) {
                                dest[i] = reconstruct_line (line1, line2, i, 0, 4, 8, 4);
                            } else if (3 * dx < -dy) {
                                dest[i] = reconstruct_line (line1, line2, i, 1, 7, 7, 1);
                            } else {
                                dest[i] = reconstruct_line (line1, line2, i, 4, 8, 4, 0);
                            }
                        } else {
                            if (dx > 2 * dy) {
                                dest[i] = reconstruct_line (line2, line1, i, 0, 0, 0, 16);
                            } else if (dx > dy) {
                                dest[i] = reconstruct_line (line2, line1, i, 0, 0, 8, 8);
                            } else if (2 * dx > dy) {
                                dest[i] = reconstruct_line (line2, line1, i, 0, 4, 8, 4);
                            } else if (3 * dx > dy) {
                                dest[i] = reconstruct_line (line2, line1, i, 1, 7, 7, 1);
                            } else {
                                dest[i] = reconstruct_line (line2, line1, i, 4, 8, 4, 0);
                            }
                        }
                    }

                    for (i = 0; i < MARGIN; i++) {
                        dest[i] = (line1[i] + line2[i] + 1) >> 1;
                    }
                    for (i = width - MARGIN; i < width; i++) {
                        dest[i] = (line1[i] + line2[i] + 1) >> 1;
                    }
                }
            }
        }
    }
}
Example #20
static GstFlowReturn
gst_comb_detect_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * inframe, GstVideoFrame * outframe)
{
  static int z;
  int k;
  int height;
  int width;

#define GET_LINE(frame,comp,line) (((unsigned char *)(frame)->data[(comp)]) + \
      (line) * GST_VIDEO_FRAME_COMP_STRIDE((frame), (comp)))

  z++;

  for (k = 1; k < 3; k++) {
    int i;
    height = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, k);
    width = GST_VIDEO_FRAME_COMP_WIDTH (outframe, k);
    for (i = 0; i < height; i++) {
      memcpy (GET_LINE (outframe, k, i), GET_LINE (inframe, k, i), width);
    }
  }

  {
    int j;
    int thisline[MAX_WIDTH];
    int score = 0;

    height = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, 0);
    width = GST_VIDEO_FRAME_COMP_WIDTH (outframe, 0);

    memset (thisline, 0, sizeof (thisline));

    k = 0;
    for (j = 0; j < height; j++) {
      int i;
      if (j < 2 || j >= height - 2) {
        guint8 *dest = GET_LINE (outframe, 0, j);
        guint8 *src = GET_LINE (inframe, 0, j);
        for (i = 0; i < width; i++) {
          dest[i] = src[i] / 2;
        }
      } else {
        guint8 *dest = GET_LINE (outframe, 0, j);
        guint8 *src1 = GET_LINE (inframe, 0, j - 1);
        guint8 *src2 = GET_LINE (inframe, 0, j);
        guint8 *src3 = GET_LINE (inframe, 0, j + 1);

        for (i = 0; i < width; i++) {
          if (src2[i] < MIN (src1[i], src3[i]) - 5 ||
              src2[i] > MAX (src1[i], src3[i]) + 5) {
            if (i > 0) {
              thisline[i] += thisline[i - 1];
            }
            thisline[i]++;
            if (thisline[i] > 1000)
              thisline[i] = 1000;
          } else {
            thisline[i] = 0;
          }
          if (thisline[i] > 100) {
            dest[i] = ((i + j + z) & 0x4) ? 235 : 16;
            score++;
          } else {
            dest[i] = src2[i];
          }
        }
      }
    }

    if (score > 10)
      GST_DEBUG ("score %d", score);
  }

  return GST_FLOW_OK;
}
Example #21
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;
    GstQuery *query;

    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);

    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);

    query = gst_query_new_allocation (caps, FALSE);
    y4mdec->video_meta = FALSE;

    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, FALSE);
      gst_object_unref (y4mdec->pool);
    }
    y4mdec->pool = NULL;

    if (gst_pad_peer_query (y4mdec->srcpad, query)) {
      y4mdec->video_meta =
          gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

      /* We only need a pool if we need to do stride conversion for downstream */
      if (!y4mdec->video_meta && memcmp (&y4mdec->info, &y4mdec->out_info,
              sizeof (y4mdec->info)) != 0) {
        GstBufferPool *pool = NULL;
        GstAllocator *allocator = NULL;
        GstAllocationParams params;
        GstStructure *config;
        guint size, min, max;

        if (gst_query_get_n_allocation_params (query) > 0) {
          gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
        } else {
          allocator = NULL;
          gst_allocation_params_init (&params);
        }

        if (gst_query_get_n_allocation_pools (query) > 0) {
          gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min,
              &max);
          size = MAX (size, y4mdec->out_info.size);
        } else {
          pool = NULL;
          size = y4mdec->out_info.size;
          min = max = 0;
        }

        if (pool == NULL) {
          pool = gst_video_buffer_pool_new ();
        }

        config = gst_buffer_pool_get_config (pool);
        gst_buffer_pool_config_set_params (config, caps, size, min, max);
        gst_buffer_pool_config_set_allocator (config, allocator, &params);
        gst_buffer_pool_set_config (pool, config);

        if (allocator)
          gst_object_unref (allocator);

        y4mdec->pool = pool;
      }
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBufferPool *pool;
      GstStructure *config;

      /* No pool, create our own if we need to do stride conversion */
      pool = gst_video_buffer_pool_new ();
      config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, caps, y4mdec->out_info.size, 0,
          0);
      gst_buffer_pool_set_config (pool, config);
      y4mdec->pool = pool;
    }
    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, TRUE);
    }
    gst_query_unref (query);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);

  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    gst_adapter_flush (y4mdec->adapter, len + 1);

    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    if (y4mdec->video_meta) {
      gst_buffer_add_video_meta_full (buffer, 0, y4mdec->info.finfo->format,
          y4mdec->info.width, y4mdec->info.height, y4mdec->info.finfo->n_planes,
          y4mdec->info.offset, y4mdec->info.stride);
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBuffer *outbuf;
      GstVideoFrame iframe, oframe;
      gint i, j;
      gint w, h, istride, ostride;
      guint8 *src, *dest;

      /* Allocate a new buffer and do stride conversion */
      g_assert (y4mdec->pool != NULL);

      flow_ret = gst_buffer_pool_acquire_buffer (y4mdec->pool, &outbuf, NULL);
      if (flow_ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        break;
      }

      gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ);
      gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE);

      for (i = 0; i < 3; i++) {
        w = GST_VIDEO_FRAME_COMP_WIDTH (&iframe, i);
        h = GST_VIDEO_FRAME_COMP_HEIGHT (&iframe, i);
        istride = GST_VIDEO_FRAME_COMP_STRIDE (&iframe, i);
        ostride = GST_VIDEO_FRAME_COMP_STRIDE (&oframe, i);
        src = GST_VIDEO_FRAME_COMP_DATA (&iframe, i);
        dest = GST_VIDEO_FRAME_COMP_DATA (&oframe, i);

        for (j = 0; j < h; j++) {
          memcpy (dest, src, w);

          dest += ostride;
          src += istride;
        }
      }

      gst_video_frame_unmap (&iframe);
      gst_video_frame_unmap (&oframe);
      gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      gst_buffer_unref (buffer);
      buffer = outbuf;
    }

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
Example #22
static int
gst_libde265_dec_get_buffer (de265_decoder_context * ctx,
    struct de265_image_spec *spec, struct de265_image *img, void *userdata)
{
  GstVideoDecoder *base = (GstVideoDecoder *) userdata;
  GstLibde265Dec *dec = GST_LIBDE265_DEC (base);
  GstVideoCodecFrame *frame = NULL;
  int i;
  int width = spec->width;
  int height = spec->height;
  GstFlowReturn ret;
  struct GstLibde265FrameRef *ref;
  GstVideoInfo *info;
  int frame_number;

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (G_UNLIKELY (frame_number == -1)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Frame has no number assigned!");
    goto fallback;
  }

  frame = gst_video_decoder_get_frame (base, frame_number);
  if (G_UNLIKELY (frame == NULL)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Couldn't get codec frame!");
    goto fallback;
  }

  if (width % spec->alignment) {
    width += spec->alignment - (width % spec->alignment);
  }
  if (width != spec->visible_width || height != spec->visible_height) {
    /* clipping not supported for now */
    goto fallback;
  }

  ret = _gst_libde265_image_available (base, width, height);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    goto fallback;
  }

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec), frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output buffer");
    goto fallback;
  }

  ref = (struct GstLibde265FrameRef *) g_malloc0 (sizeof (*ref));
  g_assert (ref != NULL);
  ref->decoder = base;
  ref->frame = frame;

  gst_buffer_replace (&ref->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  info = &dec->output_state->info;
  if (!gst_video_frame_map (&ref->vframe, info, ref->buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map frame output buffer");
    goto error;
  }

  ref->mapped = TRUE;
  if (GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe,
          0) < width * GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0)) {
    GST_DEBUG_OBJECT (dec, "plane 0: pitch too small (%d/%d*%d)",
        GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, 0), width,
        GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0));
    goto error;
  }

  if (GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0) < height) {
    GST_DEBUG_OBJECT (dec, "plane 0: lines too few (%d/%d)",
        GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0), height);
    goto error;
  }

  for (i = 0; i < 3; i++) {
    uint8_t *data;
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, i);
    if (stride % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d: pitch not aligned (%d%%%d)",
          i, stride, spec->alignment);
      goto error;
    }

    data = GST_VIDEO_FRAME_PLANE_DATA (&ref->vframe, i);
    if ((uintptr_t) (data) % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d not aligned", i);
      goto error;
    }

    de265_set_image_plane (img, i, data, stride, ref);
  }
  return 1;

error:
  gst_libde265_dec_release_frame_ref (ref);
  frame = NULL;

fallback:
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }
  return de265_get_default_image_allocation_functions ()->get_buffer (ctx,
      spec, img, userdata);
}
Example #23
static GstFlowReturn
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar **line[3];             /* the jpeg line buffer         */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines   */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4               */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint i, j;
  gint lines, v_samp[3];
  guchar *base[3], *last[3];
  gint stride[3];
  guint height;

  line[0] = y;
  line[1] = u;
  line[2] = v;

  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;

  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = GST_VIDEO_FRAME_HEIGHT (frame);

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      /* Y */
      line[0][j] = base[0] + (i + j) * stride[0];
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      /* U */
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      }
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      /* V */
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      }
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];
    }

    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
  return GST_FLOW_OK;

format_not_supported:
  {
    gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
        "Unsupported subsampling schema: v_samp factors: %u %u %u",
        v_samp[0], v_samp[1], v_samp[2]);
    return GST_FLOW_ERROR;
  }
}
Example #24
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
        return (frame ? GST_FLOW_OK : GST_FLOW_EOS);
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec)) || actual_width != openh264dec->priv->width || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        return GST_FLOW_ERROR;
    }

    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}
Example #25
static GstFlowReturn
gst_mfc_dec_fill_outbuf (GstMFCDec * self, GstBuffer * outbuf,
    struct mfc_buffer *mfc_outbuf, GstVideoCodecState * state)
{
  GstFlowReturn ret = GST_FLOW_OK;
  const guint8 *mfc_outbuf_comps[3] = { NULL, };
  gint i, j, h, w, src_stride, dst_stride;
  guint8 *dst_, *src_;
  GstVideoFrame vframe;
  Fimc *fimc = self->fimc;
  gboolean zerocopy, has_cropping;

  memset (&vframe, 0, sizeof (vframe));

  zerocopy = TRUE;
  /* FIXME: Not 100% correct, we need the memory of each
   * plane to be contiguous at least */
  if (GST_VIDEO_INFO_N_PLANES (&state->info) > gst_buffer_n_memory (outbuf)) {
    zerocopy = FALSE;
  } else {
    gint n = gst_buffer_n_memory (outbuf);

    for (i = 0; i < n; i++) {
      GstMemory *mem = gst_buffer_peek_memory (outbuf, i);

      if (!GST_MEMORY_IS_PHYSICALLY_CONTIGUOUS (mem)) {
        zerocopy = FALSE;
        break;
      }
    }
  }

  has_cropping = self->has_cropping && (self->width != self->crop_width
      || self->height != self->crop_height);

  /* We only do cropping if we do zerocopy and downstream
   * supports cropping. For non-zerocopy we can do cropping
   * more efficient.
   * We can't do cropping ourself with zerocopy because
   * FIMC returns EFAULT when queueing the destination
   * buffers
   */
  if (zerocopy && has_cropping) {
    GstVideoCropMeta *crop;

    crop = gst_buffer_add_video_crop_meta (outbuf);
    crop->x = self->crop_left;
    crop->y = self->crop_top;
    crop->width = self->crop_width;
    crop->height = self->crop_height;
  }

  if (!gst_video_frame_map (&vframe, &state->info, outbuf, GST_MAP_WRITE))
    goto frame_map_error;

  mfc_buffer_get_output_data (mfc_outbuf, (void **) &mfc_outbuf_comps[0],
      (void **) &mfc_outbuf_comps[1]);

  if (zerocopy && (has_cropping || (self->width == self->crop_width
              && self->height == self->crop_height))) {
    void *dst[3];

    if (self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      fimc = self->fimc;

      if (self->format == GST_VIDEO_FORMAT_NV12) {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[2] = 0;
      } else {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 ((self->width + 1) / 2);
        self->dst_stride[2] = GST_ROUND_UP_4 ((self->width + 1) / 2);
      }

      if (has_cropping) {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, 0, 0, self->width,
                self->height) < 0)
          goto fimc_dst_error;
      } else {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, self->crop_left,
                self->crop_top, self->crop_width, self->crop_height) < 0)
          goto fimc_dst_error;
      }
      self->mmap = FALSE;

      if (fimc_request_dst_buffers (fimc) < 0)
        goto fimc_dst_requestbuffers_error;

      self->dst[0] = NULL;
      self->dst[1] = NULL;
      self->dst[2] = NULL;
    }

    dst[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    dst[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
    if (self->format == GST_VIDEO_FORMAT_NV12)
      dst[2] = NULL;
    else
      dst[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps, (void **) dst) < 0)
      goto fimc_convert_error;
  } else {
    if (!self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      self->dst_stride[0] = 0;
      self->dst_stride[1] = 0;
      self->dst_stride[2] = 0;
      self->mmap = TRUE;
      fimc = self->fimc;
    }

    if (!self->dst[0]) {
      if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
              self->height, self->dst_stride, self->crop_left,
              self->crop_top, self->crop_width, self->crop_height) < 0)
        goto fimc_dst_error;

      if (fimc_request_dst_buffers_mmap (fimc, self->dst, self->dst_stride) < 0)
        goto fimc_dst_requestbuffers_error;
    }

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps,
            (void **) self->dst) < 0)
      goto fimc_convert_error;

    switch (state->info.finfo->format) {
      case GST_VIDEO_FORMAT_RGBx:
        dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
        src_ = self->dst[0];
        src_stride = self->dst_stride[0];
        h = GST_VIDEO_FRAME_HEIGHT (&vframe);
        w = GST_VIDEO_FRAME_WIDTH (&vframe);
        dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
        for (i = 0; i < h; i++) {
          memcpy (dst_, src_, w);
          dst_ += dst_stride;
          src_ += src_stride;
        }
        break;
      case GST_VIDEO_FORMAT_I420:
      case GST_VIDEO_FORMAT_YV12:
        for (j = 0; j < 3; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j);
          dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      case GST_VIDEO_FORMAT_NV12:
        for (j = 0; j < 2; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j) * (j == 0 ? 1 : 2);
          dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      default:
        g_assert_not_reached ();
        break;
    }
  }

done:
  if (vframe.buffer)
    gst_video_frame_unmap (&vframe);

  return ret;

frame_map_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to map output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_create_error:
  {
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to set FIMC destination parameters"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_requestbuffers_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to request FIMC destination buffers"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_convert_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to convert via FIMC"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
Example #26
static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
    gint r_h, gint comp)
{
  guchar *y_rows[16], *u_rows[16], *v_rows[16];
  guchar **scanarray[3] = { y_rows, u_rows, v_rows };
  gint i, j, k;
  gint lines;
  guchar *base[3], *last[3];
  gint stride[3];
  gint width, height;

  GST_DEBUG_OBJECT (dec,
      "unadvantageous width or r_h, taking slow route involving memcpy");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));

  /* fill chroma components for grayscale */
  if (comp == 1) {
    GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
    for (i = 0; i < 16; i++) {
      memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
      memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
    }
  }

  for (i = 0; i < height; i += r_v * DCTSIZE) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
        if (G_LIKELY (base[0] <= last[0])) {
          memcpy (base[0], y_rows[j], stride[0]);
          base[0] += stride[0];
        }
        if (r_v == 2) {
          if (G_LIKELY (base[0] <= last[0])) {
            memcpy (base[0], y_rows[j + 1], stride[0]);
            base[0] += stride[0];
          }
        }
        if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
          if (r_h == 2) {
            memcpy (base[1], u_rows[k], stride[1]);
            memcpy (base[2], v_rows[k], stride[2]);
          } else if (r_h == 1) {
            hresamplecpy1 (base[1], u_rows[k], stride[1]);
            hresamplecpy1 (base[2], v_rows[k], stride[2]);
          } else {
            /* FIXME: implement (at least we avoid crashing by doing nothing) */
          }
        }

        if (r_v == 2 || (k & 1) != 0) {
          base[1] += stride[1];
          base[2] += stride[2];
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
Example #27
static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
  GstVTApi *vt = self->ctx->vt;
  CMTime ts, duration;
  GstCoreMediaMeta *meta;
  CVPixelBufferRef pbuf = NULL;
  VTStatus vt_status;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  self->cur_inbuf = buf;

  ts = CMTimeMake (GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
  duration = CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);

  meta = gst_buffer_get_core_media_meta (buf);
  if (meta != NULL) {
    pbuf = gst_core_media_buffer_get_pixel_buffer (buf);
  }

  if (pbuf == NULL) {
    GstVTEncFrame *frame;
    CVReturn cv_ret;

    frame = gst_vtenc_frame_new (buf, &self->video_info);
    if (!frame)
      goto cv_error;

    {
      const size_t num_planes = GST_VIDEO_FRAME_N_PLANES (&frame->videoframe);
      void *plane_base_addresses[GST_VIDEO_MAX_PLANES];
      size_t plane_widths[GST_VIDEO_MAX_PLANES];
      size_t plane_heights[GST_VIDEO_MAX_PLANES];
      size_t plane_bytes_per_row[GST_VIDEO_MAX_PLANES];
      OSType pixel_format_type;
      size_t i;

      for (i = 0; i < num_planes; i++) {
        plane_base_addresses[i] =
            GST_VIDEO_FRAME_PLANE_DATA (&frame->videoframe, i);
        plane_widths[i] = GST_VIDEO_FRAME_COMP_WIDTH (&frame->videoframe, i);
        plane_heights[i] = GST_VIDEO_FRAME_COMP_HEIGHT (&frame->videoframe, i);
        plane_bytes_per_row[i] =
            GST_VIDEO_FRAME_COMP_STRIDE (&frame->videoframe, i);
      }

      switch (GST_VIDEO_INFO_FORMAT (&self->video_info)) {
        case GST_VIDEO_FORMAT_I420:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8Planar;
          break;
        case GST_VIDEO_FORMAT_NV12:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
          break;
        default:
          goto cv_error;
      }

      cv_ret = CVPixelBufferCreateWithPlanarBytes (NULL,
          self->negotiated_width, self->negotiated_height,
          pixel_format_type,
          frame,
          GST_VIDEO_FRAME_SIZE (&frame->videoframe),
          num_planes,
          plane_base_addresses,
          plane_widths,
          plane_heights,
          plane_bytes_per_row, gst_pixel_buffer_release_cb, frame, NULL, &pbuf);
      if (cv_ret != kCVReturnSuccess) {
        gst_vtenc_frame_free (frame);
        goto cv_error;
      }
    }
  }

  GST_OBJECT_LOCK (self);

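  /* a forced keyframe may change downstream caps, so drop the cached
   * caps before encoding this frame */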
  self->expect_keyframe = CFDictionaryContainsKey (self->options,
      *(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  if (self->expect_keyframe)
    gst_vtenc_clear_cached_caps_downstream (self);

  vt_status = vt->VTCompressionSessionEncodeFrame (self->session,
      pbuf, ts, duration, self->options, NULL, NULL);

  if (vt_status != 0) {
    GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
        vt_status);
  }

  vt->VTCompressionSessionCompleteFrames (self->session, kCMTimeInvalid);

  GST_OBJECT_UNLOCK (self);

  CVPixelBufferRelease (pbuf);
  self->cur_inbuf = NULL;
  gst_buffer_unref (buf);

  if (self->cur_outbufs->len > 0) {
    meta =
        gst_buffer_get_core_media_meta (g_ptr_array_index (self->cur_outbufs,
            0));
    if (!gst_vtenc_negotiate_downstream (self, meta->sample_buf))
      ret = GST_FLOW_NOT_NEGOTIATED;
  }

  for (i = 0; i != self->cur_outbufs->len; i++) {
    GstBuffer *buf = g_ptr_array_index (self->cur_outbufs, i);

    if (ret == GST_FLOW_OK) {
      ret = gst_pad_push (self->srcpad, buf);
    } else {
      gst_buffer_unref (buf);
    }
  }
  g_ptr_array_set_size (self->cur_outbufs, 0);

  return ret;

cv_error:
  {
    self->cur_inbuf = NULL;
    gst_buffer_unref (buf);

    return GST_FLOW_ERROR;
  }
}
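A side note on the snippet above: converting the buffer timestamp and
duration to milliseconds before building the CMTime throws away
sub-millisecond precision. Since GST_SECOND (1000000000) fits in CMTime's
32-bit timescale, a nanosecond-based conversion should also work; a sketch
of that alternative (not from the source, untested):

  ts = CMTimeMake (GST_BUFFER_TIMESTAMP (buf), GST_SECOND);
  duration = CMTimeMake (GST_BUFFER_DURATION (buf), GST_SECOND);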
Example #28
0
static gdouble
gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstCaps * caps1,
    GstBuffer * buf2, GstCaps * caps2)
{
  GstVideoInfo info1, info2;
  GstVideoFrame frame1, frame2;
  gint i, comps;
  /* zero-init cssim so unused components stay well-defined in the debug
   * output and in the weighted sum below */
  gdouble cssim[4] = { 0.0, }, ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 };

  if (!caps1)
    goto invalid_input;

  if (!gst_video_info_from_caps (&info1, caps1))
    goto invalid_input;

  if (!caps2)
    goto invalid_input;

  if (!gst_video_info_from_caps (&info2, caps2))
    goto invalid_input;

  if (GST_VIDEO_INFO_FORMAT (&info1) != GST_VIDEO_INFO_FORMAT (&info2) ||
      GST_VIDEO_INFO_WIDTH (&info1) != GST_VIDEO_INFO_WIDTH (&info2) ||
      GST_VIDEO_INFO_HEIGHT (&info1) != GST_VIDEO_INFO_HEIGHT (&info2))
    return comp->threshold + 1;

  comps = GST_VIDEO_INFO_N_COMPONENTS (&info1);
  /* note that some are reported both yuv and gray */
  for (i = 0; i < comps; ++i)
    c[i] = 1.0;
  /* increase luma weight if yuv */
  if (GST_VIDEO_INFO_IS_YUV (&info1) && (comps > 1))
    c[0] = comps - 1;
  for (i = 0; i < comps; ++i)
    c[i] /= (GST_VIDEO_INFO_IS_YUV (&info1) && (comps > 1)) ?
        2 * (comps - 1) : comps;

  gst_video_frame_map (&frame1, &info1, buf1, GST_MAP_READ);
  gst_video_frame_map (&frame2, &info2, buf2, GST_MAP_READ);

  for (i = 0; i < comps; i++) {
    gint cw, ch, step, stride;

    /* only support most common formats */
    if (GST_VIDEO_INFO_COMP_DEPTH (&info1, i) != 8)
      goto unsupported_input;
    cw = GST_VIDEO_FRAME_COMP_WIDTH (&frame1, i);
    ch = GST_VIDEO_FRAME_COMP_HEIGHT (&frame1, i);
    step = GST_VIDEO_FRAME_COMP_PSTRIDE (&frame1, i);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame1, i);

    GST_LOG_OBJECT (comp, "component %d", i);
    cssim[i] = gst_compare_ssim_component (comp,
        GST_VIDEO_FRAME_COMP_DATA (&frame1, i),
        GST_VIDEO_FRAME_COMP_DATA (&frame2, i), cw, ch, step, stride);
    GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]);
  }

  gst_video_frame_unmap (&frame1);
  gst_video_frame_unmap (&frame2);

#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < 4; i++) {
    GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]);
  }
#endif

  ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] + cssim[3] * c[3];

  return ssim;

  /* ERRORS */
invalid_input:
  {
    GST_ERROR_OBJECT (comp, "ssim method needs raw video input");
    return 0;
  }
unsupported_input:
  {
    GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT,
        caps1);
    /* both frames were mapped above, so drop the mappings before bailing */
    gst_video_frame_unmap (&frame1);
    gst_video_frame_unmap (&frame2);
    return 0;
  }
}
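For a typical I420 input (three YUV components) the weighting above works
out to 0.5 for luma and 0.25 per chroma plane. A small standalone check of
that arithmetic, mirroring the loops in the snippet:

#include <stdio.h>

int
main (void)
{
  int comps = 3, is_yuv = 1, i;
  double c[4] = { 1.0, 0.0, 0.0, 0.0 };

  for (i = 0; i < comps; i++)
    c[i] = 1.0;
  /* luma gets extra weight for YUV; dividing normalizes the sum to 1.0 */
  if (is_yuv && comps > 1)
    c[0] = comps - 1;
  for (i = 0; i < comps; i++)
    c[i] /= (is_yuv && comps > 1) ? 2 * (comps - 1) : comps;

  for (i = 0; i < comps; i++)
    printf ("c[%d] = %.2f\n", i, c[i]);   /* prints 0.50, 0.25, 0.25 */
  return 0;
}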
Example #29
0
static GstFlowReturn
gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstVideoCodecFrame * in_frame,
    GstVideoFrame * input_vframe)
{
  GstVideoCodecState *state;
  GstVideoInfo *info;
  GstVideoInfo *dinfo;
  guint c, n_planes;
  GstVideoFrame output_frame;
  GstFlowReturn ret;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  info = &state->info;
  dinfo = &dec->decoded_info;

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
      "Copying input buffer %ux%u (%" G_GSIZE_FORMAT ") to output buffer "
      "%ux%u (%" G_GSIZE_FORMAT ")", dinfo->width, dinfo->height,
      dinfo->size, info->width, info->height, info->size);

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec),
      in_frame);
  if (ret != GST_FLOW_OK)
    goto beach;

  if (!gst_video_frame_map (&output_frame, info, in_frame->output_buffer,
          GST_MAP_WRITE))
    goto map_fail;

  n_planes = GST_VIDEO_FRAME_N_PLANES (&output_frame);
  for (c = 0; c < n_planes; c++) {
    guint w, h, j;
    guint8 *sp, *dp;
    gint ss, ds;

    sp = GST_VIDEO_FRAME_PLANE_DATA (input_vframe, c);
    dp = GST_VIDEO_FRAME_PLANE_DATA (&output_frame, c);

    ss = GST_VIDEO_FRAME_PLANE_STRIDE (input_vframe, c);
    ds = GST_VIDEO_FRAME_PLANE_STRIDE (&output_frame, c);

    w = MIN (ABS (ss), ABS (ds));
    h = GST_VIDEO_FRAME_COMP_HEIGHT (&output_frame, c);

    GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy plane %u, w:%u h:%u ", c, w, h);

    for (j = 0; j < h; j++) {
      memcpy (dp, sp, w);
      dp += ds;
      sp += ss;
    }
  }

  gst_video_frame_unmap (&output_frame);

  GST_BUFFER_FLAGS (in_frame->output_buffer) =
      GST_BUFFER_FLAGS (input_vframe->buffer);

beach:
  gst_video_codec_state_unref (state);

  return ret;

map_fail:
  {
    GST_ERROR_OBJECT (dec, "Failed to map output frame");
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }
}
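The plane copy above clips each row to MIN (ABS (ss), ABS (ds)) bytes,
which stays within bounds because every plane allocation is at least
stride * height bytes. The same pattern as a free-standing helper, shown
only as a sketch (not part of the source):

static void
copy_plane_rows (guint8 * dest, gint dest_stride,
    const guint8 * src, gint src_stride, guint row_bytes, guint height)
{
  guint j;

  /* copy row_bytes per line; each side advances by its own stride */
  for (j = 0; j < height; j++) {
    memcpy (dest, src, row_bytes);
    dest += dest_stride;
    src += src_stride;
  }
}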
Example #30
0
static GstFlowReturn
gst_compositor_aggregate_frames (GstVideoAggregator * vagg, GstBuffer * outbuf)
{
  GList *l;
  GstCompositor *self = GST_COMPOSITOR (vagg);
  BlendFunction composite;
  GstVideoFrame out_frame, *outframe;

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, GST_MAP_WRITE)) {
    return GST_FLOW_ERROR;
  }

  outframe = &out_frame;
  /* default to blending */
  composite = self->blend;
  switch (self->background) {
    case COMPOSITOR_BACKGROUND_CHECKER:
      self->fill_checker (outframe);
      break;
    case COMPOSITOR_BACKGROUND_BLACK:
      /* YCbCr triplet for black */
      self->fill_color (outframe, 16, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_WHITE:
      /* YCbCr triplet for white */
      self->fill_color (outframe, 240, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_TRANSPARENT:
    {
      guint i, plane, num_planes, height;

      num_planes = GST_VIDEO_FRAME_N_PLANES (outframe);
      for (plane = 0; plane < num_planes; ++plane) {
        guint8 *pdata;
        gsize rowsize, plane_stride;

        pdata = GST_VIDEO_FRAME_PLANE_DATA (outframe, plane);
        plane_stride = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, plane);
        rowsize = GST_VIDEO_FRAME_COMP_WIDTH (outframe, plane)
            * GST_VIDEO_FRAME_COMP_PSTRIDE (outframe, plane);
        height = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, plane);
        for (i = 0; i < height; ++i) {
          memset (pdata, 0, rowsize);
          pdata += plane_stride;
        }
      }

      /* use overlay to keep background transparent */
      composite = self->overlay;
      break;
    }
  }

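  /* blend (or overlay) each sink pad's currently aggregated frame into
   * the background we just prepared */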
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstCompositorPad *compo_pad = GST_COMPOSITOR_PAD (pad);

    if (pad->aggregated_frame != NULL) {
      composite (pad->aggregated_frame, compo_pad->xpos, compo_pad->ypos,
          compo_pad->alpha, outframe);
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  gst_video_frame_unmap (outframe);

  return GST_FLOW_OK;
}
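In the transparent branch above, rowsize is the component width multiplied
by the per-pixel stride, so only the visible bytes of each row are cleared
and row padding is left untouched. For a single-plane packed 8-bit format
such as AYUV that loop collapses to the shape below (a simplified sketch
under that assumption, not code from the source):

static void
clear_packed_frame (guint8 * data, gint stride, gint width, gint height)
{
  gint i;

  /* zero 4 bytes per pixel, stepping by the plane stride between rows */
  for (i = 0; i < height; i++) {
    memset (data, 0, width * 4);
    data += stride;
  }
}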