Example #1
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                                              PlanarYCbCrImage::Data *aData)
{
  NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
               "Non-YUV video frame formats not supported");
  NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
               "Unsupported number of components in video frame");

  aData->mPicX = aData->mPicY = 0;
  aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  aData->mStereoMode = StereoMode::MONO;

  /* Luma (Y) plane: data pointer, stride, size, and per-pixel skip. */
  aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
  aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
  aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
                               GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
  aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;

  /* Chroma (Cb/Cr) planes share the same stride and size. */
  aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
  aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
                                  GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
  aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
  aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
  aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
  aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
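
This helper only copies plane pointers and strides out of a frame that the caller has already mapped. As a minimal sketch of that surrounding pattern (the function name and arguments below are illustrative, not from the original source), the caller maps a GstBuffer into a GstVideoFrame, reads the component geometry through the GST_VIDEO_FRAME_COMP_* macros, and unmaps when done:

#include <gst/video/video.h>

/* Sketch only: map a decoded buffer for reading, print the Y-plane
 * geometry, then unmap. `caps` and `buffer` are assumed to come from
 * the decoder's output. */
static gboolean
inspect_y_plane (GstCaps * caps, GstBuffer * buffer)
{
  GstVideoInfo info;
  GstVideoFrame frame;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;
  if (!gst_video_frame_map (&frame, &info, buffer, GST_MAP_READ))
    return FALSE;

  g_print ("Y: %dx%d, stride %d, pixel stride %d\n",
      GST_VIDEO_FRAME_COMP_WIDTH (&frame, 0),
      GST_VIDEO_FRAME_COMP_HEIGHT (&frame, 0),
      GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0),
      GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, 0));

  /* Pointers obtained via GST_VIDEO_FRAME_COMP_DATA() are only valid
   * while the frame stays mapped. */
  gst_video_frame_unmap (&frame);
  return TRUE;
}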

Example #2
static opj_image_t *
gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame)
{
  gint i, ncomps;
  opj_image_cmptparm_t *comps;
  OPJ_COLOR_SPACE colorspace;
  opj_image_t *image;

  ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
  comps = g_new0 (opj_image_cmptparm_t, ncomps);

  for (i = 0; i < ncomps; i++) {
    comps[i].prec = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].sgnd = 0;
    comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
    /* dx/dy are this component's horizontal/vertical subsampling factors
     * relative to the full frame (1 for luma, 2 for 2x-subsampled chroma). */
    comps[i].dx =
        GST_VIDEO_FRAME_WIDTH (frame) / GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].dy =
        GST_VIDEO_FRAME_HEIGHT (frame) / GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
  }

  if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
    colorspace = OPJ_CLRSPC_SYCC;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
    colorspace = OPJ_CLRSPC_SRGB;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
    colorspace = OPJ_CLRSPC_GRAY;
  else
    g_return_val_if_reached (NULL);

  image = opj_image_create (ncomps, comps, colorspace);
  g_free (comps);

  image->x0 = image->y0 = 0;
  image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
  image->y1 = GST_VIDEO_FRAME_HEIGHT (frame);

  self->fill_image (image, frame);

  return image;
}
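
The dx/dy values filled in above are the per-component subsampling factors that the OpenJPEG component parameters expect. As a minimal sketch (assuming I420, not taken from the original plugin), the same geometry can be inspected through GstVideoInfo: for a 320x240 I420 frame the Y component is 320x240 (dx=dy=1) and the two chroma components are 160x120 (dx=dy=2):

#include <gst/video/video.h>

/* Sketch only: print each component's size and the subsampling factors
 * that gst_openjpeg_enc_fill_image() would derive for I420. */
static void
print_subsampling (void)
{
  GstVideoInfo info;
  gint i;

  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 320, 240);

  for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (&info); i++) {
    g_print ("component %d: %dx%d (dx=%d, dy=%d)\n", i,
        GST_VIDEO_INFO_COMP_WIDTH (&info, i),
        GST_VIDEO_INFO_COMP_HEIGHT (&info, i),
        GST_VIDEO_INFO_WIDTH (&info) / GST_VIDEO_INFO_COMP_WIDTH (&info, i),
        GST_VIDEO_INFO_HEIGHT (&info) / GST_VIDEO_INFO_COMP_HEIGHT (&info, i));
  }
}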

Example #3
static void
check_1x1_buffer (GstBuffer * buf, GstCaps * caps)
{
  GstVideoInfo info;
  GstVideoFrame frame;
  /* the exact values we check for come from videotestsrc */
  static const guint yuv_values[] = { 81, 90, 240, 255 };
  static const guint rgb_values[] = { 0xff, 0, 0, 255 };
  static const guint gray8_values[] = { 0x51 };
  static const guint gray16_values[] = { 0x5151 };
  const guint *values;
  guint i;
  const GstVideoFormatInfo *finfo;

  fail_unless (buf != NULL);
  fail_unless (caps != NULL);

  fail_unless (gst_video_info_from_caps (&info, caps));
  fail_unless (gst_video_frame_map (&frame, &info, buf, GST_MAP_READ));

  finfo = info.finfo;

  if (GST_VIDEO_INFO_IS_YUV (&info)) {
    values = yuv_values;
  } else if (GST_VIDEO_INFO_IS_GRAY (&info)) {
    if (GST_VIDEO_FORMAT_INFO_BITS (finfo) == 8)
      values = gray8_values;
    else
      values = gray16_values;
  } else {
    values = rgb_values;
  }

  GST_MEMDUMP ("buffer", GST_VIDEO_FRAME_PLANE_DATA (&frame, 0), 8);

  for (i = 0; i < GST_VIDEO_FRAME_N_COMPONENTS (&frame); i++) {
    guint8 *data = GST_VIDEO_FRAME_COMP_DATA (&frame, i);

    GST_DEBUG ("W: %d", GST_VIDEO_FORMAT_INFO_W_SUB (finfo, i));
    GST_DEBUG ("H: %d", GST_VIDEO_FORMAT_INFO_H_SUB (finfo, i));

    if (GST_VIDEO_FORMAT_INFO_W_SUB (finfo,
            i) >= GST_VIDEO_FRAME_WIDTH (&frame))
      continue;
    if (GST_VIDEO_FORMAT_INFO_H_SUB (finfo,
            i) >= GST_VIDEO_FRAME_HEIGHT (&frame))
      continue;

    if (GST_VIDEO_FORMAT_INFO_BITS (finfo) == 8) {
      fail_unless_equals_int (data[0], values[i]);
    } else if (GST_VIDEO_FORMAT_INFO_BITS (finfo) == 16) {
      guint16 pixels, val;
      gint depth;

      if (GST_VIDEO_FORMAT_INFO_IS_LE (finfo))
        pixels = GST_READ_UINT16_LE (data);
      else
        pixels = GST_READ_UINT16_BE (data);

      /* Isolate this component from the packed 16-bit word: shift it down
       * to bit 0, then mask it to its depth (e.g. 5/6/5 bits for RGB16). */
      depth = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, i);
      val = pixels >> GST_VIDEO_FORMAT_INFO_SHIFT (finfo, i);
      val = val & ((1 << depth) - 1);

      GST_DEBUG ("val %08x %d : %d", pixels, i, val);
      if (depth <= 8) {
        fail_unless_equals_int (val, values[i] >> (8 - depth));
      } else {