Code example #1
File: gstpnmdec.c Project: bilboed/gst-plugins-bad
static GstFlowReturn
gst_pnmdec_push (GstPnmdec * s, GstPad * src, GstBuffer * buf)
{
  /* Need to convert from PNM rowstride to GStreamer rowstride */
  if (s->mngr.info.width % 4 != 0) {
    guint i_rowstride;
    guint o_rowstride;
    GstBuffer *obuf;
    guint i;

    if (s->mngr.info.type == GST_PNM_TYPE_PIXMAP_RAW ||
        s->mngr.info.type == GST_PNM_TYPE_PIXMAP_ASCII) {
      i_rowstride = 3 * s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    } else {
      i_rowstride = s->mngr.info.width;
      o_rowstride = GST_ROUND_UP_4 (i_rowstride);
    }

    obuf = gst_buffer_new_and_alloc (o_rowstride * s->mngr.info.height);

    gst_buffer_copy_metadata (obuf, buf, GST_BUFFER_COPY_ALL);

    for (i = 0; i < s->mngr.info.height; i++)
      memcpy (GST_BUFFER_DATA (obuf) + i * o_rowstride,
          GST_BUFFER_DATA (buf) + i * i_rowstride, i_rowstride);

    gst_buffer_unref (buf);
    return gst_pad_push (src, obuf);
  } else {
    return gst_pad_push (src, buf);
  }
}
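
For reference, the macro at the heart of every example on this page is a one-line bit trick: GST_ROUND_UP_4 (and its siblings GST_ROUND_UP_2 and GST_ROUND_UP_8) from GStreamer's gstutils.h rounds an integer up to the next multiple of 4 by adding 3 and masking off the low bits. A minimal standalone sketch (the macro bodies match the GStreamer headers; the sample widths are our own illustration):

#include <stdio.h>

/* Definitions as they appear in GStreamer's gstutils.h */
#define GST_ROUND_UP_2(num) (((num)+1)&~1)
#define GST_ROUND_UP_4(num) (((num)+3)&~3)
#define GST_ROUND_UP_8(num) (((num)+7)&~7)

int
main (void)
{
  int w;

  /* A 24-bit RGB row holds 3*w bytes of pixel data, padded to the next
   * 4-byte boundary -- the i_rowstride/o_rowstride pair of example #1 */
  for (w = 97; w <= 100; w++)
    printf ("width %3d: 3*w = %3d, stride = %3d\n",
        w, 3 * w, GST_ROUND_UP_4 (3 * w));
  return 0;
}

Only when a row is already a multiple of 4 bytes (here width 100, 3*w = 300) does the padding vanish, which is why example #1 can push the buffer through untouched when width % 4 == 0.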
Code example #2
File: mxfup.c Project: pli3/gst-plugins-bad
static GstFlowReturn
mxf_up_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
    GstCaps * caps,
    MXFMetadataTimelineTrack * track,
    gpointer mapping_data, GstBuffer ** outbuf)
{
  MXFUPMappingData *data = mapping_data;

  /* SMPTE 384M 7.1 */
  if (key->u[12] != 0x15 || (key->u[14] != 0x01 && key->u[14] != 0x02
          && key->u[14] != 0x03 && key->u[14] != 0x04)) {
    GST_ERROR ("Invalid uncompressed picture essence element");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  /* Strip the start/end padding described by the mapping, if any */
  if (data && (data->image_start_offset != 0 || data->image_end_offset != 0)) {
    if (data->image_start_offset + data->image_end_offset
        > GST_BUFFER_SIZE (buffer)) {
      gst_buffer_unref (buffer);
      GST_ERROR ("Invalid buffer size");
      return GST_FLOW_ERROR;
    }
    GST_BUFFER_DATA (buffer) += data->image_start_offset;
    GST_BUFFER_SIZE (buffer) -= data->image_start_offset;
    GST_BUFFER_SIZE (buffer) -= data->image_end_offset;
  }

  /* data is dereferenced below, so a missing mapping is fatal; unref the
   * buffer on this error path too so it is not leaked */
  if (!data
      || GST_BUFFER_SIZE (buffer) != data->bpp * data->width * data->height) {
    GST_ERROR ("Invalid buffer size");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (data->bpp != 4
      || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
    guint y;
    GstBuffer *ret;
    guint8 *indata, *outdata;

    ret =
        gst_buffer_new_and_alloc (GST_ROUND_UP_4 (data->width * data->bpp) *
        data->height);
    indata = GST_BUFFER_DATA (buffer);
    outdata = GST_BUFFER_DATA (ret);

    for (y = 0; y < data->height; y++) {
      memcpy (outdata, indata, data->width * data->bpp);
      outdata += GST_ROUND_UP_4 (data->width * data->bpp);
      indata += data->width * data->bpp;
    }

    gst_buffer_unref (buffer);
    *outbuf = ret;
  } else {
    *outbuf = buffer;
  }

  return GST_FLOW_OK;
}
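
Like example #1, this pads tightly packed rows (width * bpp bytes) out to GStreamer's GST_ROUND_UP_4 (width * bpp) stride with one memcpy per row; example #15 below performs the inverse when writing essence back out, stripping the padding again.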
Code example #3
static gboolean
gst_phoenixsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  GstVideoInfo vinfo;
  GstStructure *s = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (src, "The caps being set are %" GST_PTR_FORMAT, caps);

  gst_video_info_from_caps (&vinfo, caps);

  if (g_str_equal ("video/x-bayer", gst_structure_get_name (s))) {
    gint width;
    const gchar *format;
    gst_structure_get_int (s, "width", &width);
    gst_structure_get_int (s, "height", &src->height);
    format = gst_structure_get_string (s, "format");
    if (g_str_has_suffix (format, "16"))
      src->gst_stride = GST_ROUND_UP_4 (width * 2);
    else
      src->gst_stride = GST_ROUND_UP_4 (width);
  } else if (GST_VIDEO_INFO_FORMAT (&vinfo) != GST_VIDEO_FORMAT_UNKNOWN) {
    src->gst_stride = GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0);
    src->height = vinfo.height;
  } else {
    goto unsupported_caps;
  }

  return TRUE;

unsupported_caps:
  GST_ERROR_OBJECT (src, "Unsupported caps: %" GST_PTR_FORMAT, caps);
  return FALSE;
}
Code example #4
gint
gst_xvid_image_fill (xvid_image_t * im, void *ptr, gint csp,
    gint width, gint height)
{
  gint stride, h2, size = 0;

  im->csp = csp;

  switch (csp) {
    case XVID_CSP_I420:
    case XVID_CSP_YV12:
      /* planar */
      /* luma */
      stride = GST_ROUND_UP_4 (width);
      h2 = GST_ROUND_UP_2 (height);
      im->stride[0] = stride;
      im->plane[0] = ptr;
      /* chroma */
      im->plane[1] = ((guint8 *) im->plane[0]) + (stride * h2);
      size += stride * height;
      stride = GST_ROUND_UP_8 (width) / 2;
      h2 = GST_ROUND_UP_2 (height) / 2;
      im->stride[1] = stride;

      im->plane[2] = ((guint8 *) im->plane[1]) + (stride * h2);
      im->stride[2] = stride;
      size += 2 * (stride * h2);
      break;
    case XVID_CSP_RGB555:
    case XVID_CSP_RGB565:
    case XVID_CSP_YUY2:
    case XVID_CSP_UYVY:
    case XVID_CSP_YVYU:
      /* packed */
      stride = GST_ROUND_UP_4 (width * 2);
      im->plane[0] = ptr;
      im->stride[0] = stride;
      size = stride * height;
      break;
    case XVID_CSP_BGR:
      /* packed 24-bit: one row is width * 3 bytes, padded to 4 bytes */
      stride = GST_ROUND_UP_4 (width * 3);
      im->plane[0] = ptr;
      im->stride[0] = stride;
      size = stride * height;
      break;
    case XVID_CSP_ABGR:
    case XVID_CSP_BGRA:
    case XVID_CSP_RGBA:
#ifdef XVID_CSP_ARGB
    case XVID_CSP_ARGB:
#endif
      stride = width * 4;
      im->plane[0] = ptr;
      im->stride[0] = stride;
      size = stride * height;
      break;
  }

  return size;
}
Code example #5
/* based on upstream gst-plugins-good jpegencoder */
static void
generate_sampling_factors (GstVaapiEncoderJpeg * encoder)
{
  GstVideoInfo *vinfo;
  gint i;

  vinfo = GST_VAAPI_ENCODER_VIDEO_INFO (encoder);

  if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED) {
    /* Use native I420 format */
    encoder->n_components = 3;
    for (i = 0; i < encoder->n_components; ++i) {
      if (i == 0)
        encoder->h_samp[i] = encoder->v_samp[i] = 2;
      else
        encoder->h_samp[i] = encoder->v_samp[i] = 1;
      GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
          encoder->v_samp[i]);
    }
    return;
  }

  encoder->n_components = GST_VIDEO_INFO_N_COMPONENTS (vinfo);

  encoder->h_max_samp = 0;
  encoder->v_max_samp = 0;
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (vinfo, i);
    encoder->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (vinfo, i);
    encoder->h_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (vinfo)) / encoder->cwidth[i];
    encoder->h_max_samp = MAX (encoder->h_max_samp, encoder->h_samp[i]);
    encoder->v_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_HEIGHT (vinfo)) / encoder->cheight[i];
    encoder->v_max_samp = MAX (encoder->v_max_samp, encoder->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (encoder->h_max_samp <= 4);
  g_assert (encoder->v_max_samp <= 4);

  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->h_samp[i] = encoder->h_max_samp / encoder->h_samp[i];
    encoder->v_samp[i] = encoder->v_max_samp / encoder->v_samp[i];
    GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
        encoder->v_samp[i]);
  }
}
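
To trace the arithmetic: for I420 at 1920x1080 the luma component has cwidth 1920, so h_samp starts as GST_ROUND_UP_4 (1920) / 1920 = 1, while each chroma component has cwidth 960 and starts as 2; h_max_samp is therefore 2, and after the inversion loop the factors come out as the familiar JPEG 2x2 luma / 1x1 chroma sampling.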
Code example #6
static gboolean
gst_bayer2rgb_get_unit_size (GstBaseTransform * base, GstCaps * caps,
    guint * size)
{
  GstStructure *structure;
  int width;
  int height;
  int pixsize;
  const char *name;

  structure = gst_caps_get_structure (caps, 0);

  if (gst_structure_get_int (structure, "width", &width) &&
      gst_structure_get_int (structure, "height", &height)) {
    name = gst_structure_get_name (structure);
    /* Our name must be either video/x-raw-bayer or video/x-raw-rgb */
    if (strcmp (name, "video/x-raw-rgb")) {
      /* For bayer, we handle only BA81 (BGGR), which is 8 bits per pixel */
      *size = GST_ROUND_UP_4 (width) * height;
      return TRUE;
    } else {
      /* For output, calculate according to format */
      if (gst_structure_get_int (structure, "bpp", &pixsize)) {
        *size = width * height * (pixsize / 8);
        return TRUE;
      }
    }

  }
  GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
      ("Incomplete caps, some required field missing"));
  return FALSE;
}
Code example #7
static gboolean
gst_bayer2rgb_set_caps (GstBaseTransform * base, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstBayer2RGB *filter = GST_BAYER2RGB (base);
  GstStructure *structure;
  int val, bpp;

  GST_DEBUG ("in caps %" GST_PTR_FORMAT " out caps %" GST_PTR_FORMAT, incaps,
      outcaps);

  structure = gst_caps_get_structure (incaps, 0);

  gst_structure_get_int (structure, "width", &filter->width);
  gst_structure_get_int (structure, "height", &filter->height);
  filter->stride = GST_ROUND_UP_4 (filter->width);

  /* To cater for different RGB formats, we need to set params for later */
  structure = gst_caps_get_structure (outcaps, 0);
  gst_structure_get_int (structure, "bpp", &bpp);
  filter->pixsize = bpp / 8;
  gst_structure_get_int (structure, "red_mask", &val);
  filter->r_off = get_pix_offset (val, bpp);
  gst_structure_get_int (structure, "green_mask", &val);
  filter->g_off = get_pix_offset (val, bpp);
  gst_structure_get_int (structure, "blue_mask", &val);
  filter->b_off = get_pix_offset (val, bpp);

  return TRUE;
}
Code example #8
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));

  /* FIXME: implement interlaced pictures */

  /* If the output buffer doesn't exist, allocation failed and the
   * return code will already have been set */
  if (GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
    GstVideoFrame frame;
    GstBuffer *buffer = pngdec->current_frame->output_buffer;
    size_t offset;
    gint width;
    guint8 *data;

    if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
            GST_MAP_WRITE)) {
      pngdec->ret = GST_FLOW_ERROR;
      return;
    }

    data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
    offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
    GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
        (guint) row_num, pngdec->current_frame->output_buffer, offset);
    width = GST_ROUND_UP_4 (png_get_rowbytes (pngdec->png, pngdec->info));
    memcpy (data + offset, new_row, width);
    gst_video_frame_unmap (&frame);
    pngdec->ret = GST_FLOW_OK;
  }
}
Code example #9
static void
user_info_callback (png_structp png_ptr, png_infop info)
{
  GstPngDec *pngdec = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  size_t buffer_size;
  GstBuffer *buffer = NULL;

  pngdec = GST_PNGDEC (png_ptr->io_ptr);

  GST_LOG ("info ready");

  /* Generate the caps and configure */
  ret = gst_pngdec_caps_create_and_set (pngdec);
  if (ret != GST_FLOW_OK) {
    goto beach;
  }

  /* Allocate output buffer */
  pngdec->rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
  buffer_size = pngdec->height * GST_ROUND_UP_4 (pngdec->rowbytes);
  ret =
      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
      buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);
  if (ret != GST_FLOW_OK) {
    goto beach;
  }

  pngdec->buffer_out = buffer;

beach:
  pngdec->ret = ret;
}
Code example #10
static void
gst_jpegenc_resync (GstJpegEnc * jpegenc)
{
  gint width, height;
  gint i, j;

  GST_DEBUG_OBJECT (jpegenc, "resync");

  jpegenc->cinfo.image_width = width = jpegenc->width;
  jpegenc->cinfo.image_height = height = jpegenc->height;
  jpegenc->cinfo.input_components = jpegenc->channels;

  GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
  GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format);

  if (gst_video_format_is_rgb (jpegenc->format)) {
    GST_DEBUG_OBJECT (jpegenc, "RGB");
    jpegenc->cinfo.in_color_space = JCS_RGB;
  } else if (gst_video_format_is_gray (jpegenc->format)) {
    GST_DEBUG_OBJECT (jpegenc, "gray");
    jpegenc->cinfo.in_color_space = JCS_GRAYSCALE;
  } else {
    GST_DEBUG_OBJECT (jpegenc, "YUV");
    jpegenc->cinfo.in_color_space = JCS_YCbCr;
  }

  /* input buffer size as max output */
  jpegenc->bufsize = gst_video_format_get_size (jpegenc->format, width, height);
  jpeg_set_defaults (&jpegenc->cinfo);
  jpegenc->cinfo.raw_data_in = TRUE;
  /* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */
  if (jpegenc->cinfo.in_color_space == JCS_RGB)
    jpeg_set_colorspace (&jpegenc->cinfo, JCS_RGB);

  GST_DEBUG_OBJECT (jpegenc, "h_max_samp=%d, v_max_samp=%d",
      jpegenc->h_max_samp, jpegenc->v_max_samp);
  /* image dimension info */
  for (i = 0; i < jpegenc->channels; i++) {
    GST_DEBUG_OBJECT (jpegenc, "comp %i: h_samp=%d, v_samp=%d", i,
        jpegenc->h_samp[i], jpegenc->v_samp[i]);
    jpegenc->cinfo.comp_info[i].h_samp_factor = jpegenc->h_samp[i];
    jpegenc->cinfo.comp_info[i].v_samp_factor = jpegenc->v_samp[i];
    jpegenc->line[i] = g_realloc (jpegenc->line[i],
        jpegenc->v_max_samp * DCTSIZE * sizeof (char *));
    if (!jpegenc->planar) {
      for (j = 0; j < jpegenc->v_max_samp * DCTSIZE; j++) {
        jpegenc->row[i][j] = g_realloc (jpegenc->row[i][j], width);
        jpegenc->line[i][j] = jpegenc->row[i][j];
      }
    }
  }

  /* guard against a potential error in gst_jpegenc_term_destination
     which occurs iff bufsize % 4 < free_space_remaining */
  jpegenc->bufsize = GST_ROUND_UP_4 (jpegenc->bufsize);

  jpeg_suppress_tables (&jpegenc->cinfo, TRUE);

  GST_DEBUG_OBJECT (jpegenc, "resync done");
}
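
The GST_ROUND_UP_4 on bufsize pairs with gst_jpegenc_term_destination (example #22), which rounds the trimmed output size up the same way; keeping the allocation a multiple of 4 ensures that rounded-up final size can never exceed the buffer.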
Code example #11
File: emotion_convert.c Project: Limsik/e17
static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
   const unsigned char **rows;
   unsigned int i, j;
   unsigned int rh;
   unsigned int stride_y, stride_uv;

   rh = output_height;

   rows = (const unsigned char **)evas_data;

   stride_y = GST_ROUND_UP_4(w);
   stride_uv = GST_ROUND_UP_8(w) / 2;

   for (i = 0; i < rh; i++)
     rows[i] = &gst_data[i * stride_y];

   for (j = 0; j < (rh / 2); j++, i++)
     rows[i] = &gst_data[h * stride_y + j * stride_uv];

   for (j = 0; j < (rh / 2); j++, i++)
     rows[i] = &gst_data[h * stride_y +
			 (rh / 2) * stride_uv +
			 j * stride_uv];
}
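
Note that the luma rows here use stride GST_ROUND_UP_4 (w) and the chroma rows GST_ROUND_UP_8 (w) / 2, matching what gst_video_format_get_row_stride returns for I420 in examples #25 and #26 (its GST_ROUND_UP_4 (GST_ROUND_UP_2 (w) / 2) evaluates to the same value as GST_ROUND_UP_8 (w) / 2 for every w).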
Code example #12
File: video.c Project: genesi/gst-base-plugins
static void
paint_setup_Y800 (paintinfo * p, unsigned char *dest)
{
  /* untested */
  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->endptr = dest + p->ystride * p->height;
}
Code example #13
File: video.c Project: genesi/gst-base-plugins
static void
paint_setup_IYU2 (paintinfo * p, unsigned char *dest)
{
  /* untested */
  p->yp = dest + 1;
  p->up = dest + 0;
  p->vp = dest + 2;
  p->ystride = GST_ROUND_UP_4 (p->width * 3);
  p->endptr = dest + p->ystride * p->height;
}
Code example #14
File: video.c Project: genesi/gst-base-plugins
static void
paint_setup_Y42B (paintinfo * p, unsigned char *dest)
{
  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->up = p->yp + p->ystride * p->height;
  p->ustride = GST_ROUND_UP_8 (p->width) / 2;
  p->vp = p->up + p->ustride * p->height;
  p->vstride = GST_ROUND_UP_8 (p->width) / 2;
  p->endptr = p->vp + p->vstride * p->height;
}
Code example #15
File: mxfup.c Project: pli3/gst-plugins-bad
static GstFlowReturn
mxf_up_write_func (GstBuffer * buffer, GstCaps * caps, gpointer mapping_data,
    GstAdapter * adapter, GstBuffer ** outbuf, gboolean flush)
{
  MXFUPMappingData *data = mapping_data;

  if (!buffer)
    return GST_FLOW_OK;

  if (GST_BUFFER_SIZE (buffer) !=
      GST_ROUND_UP_4 (data->bpp * data->width) * data->height) {
    GST_ERROR ("Invalid buffer size");
    /* drop the buffer on the error path so it is not leaked */
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (data->bpp != 4
      || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
    guint y;
    GstBuffer *ret;
    guint8 *indata, *outdata;

    ret = gst_buffer_new_and_alloc (data->width * data->bpp * data->height);
    indata = GST_BUFFER_DATA (buffer);
    outdata = GST_BUFFER_DATA (ret);

    for (y = 0; y < data->height; y++) {
      memcpy (outdata, indata, data->width * data->bpp);
      indata += GST_ROUND_UP_4 (data->width * data->bpp);
      outdata += data->width * data->bpp;
    }

    gst_buffer_unref (buffer);

    *outbuf = ret;
  } else {
    *outbuf = buffer;
  }

  return GST_FLOW_OK;
}
Code example #16
File: emotion_convert.c Project: Limsik/e17
static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
{
   const unsigned char **rows;
   unsigned int i;
   unsigned int stride;

   rows = (const unsigned char **)evas_data;

   stride = GST_ROUND_UP_4(w * 2);

   for (i = 0; i < output_height; i++)
     rows[i] = &gst_data[i * stride];
}
Code example #17
File: gstpngenc.c Project: spunktsch/svtplayer
static gboolean
gst_pngenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstPngEnc *pngenc;
  const GValue *fps;
  GstStructure *structure;
  GstCaps *pcaps;
  gboolean ret = TRUE;

  pngenc = GST_PNGENC (gst_pad_get_parent (pad));

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &pngenc->width);
  gst_structure_get_int (structure, "height", &pngenc->height);
  fps = gst_structure_get_value (structure, "framerate");
  gst_structure_get_int (structure, "bpp", &pngenc->bpp);

  if (pngenc->bpp == 32)
    pngenc->stride = pngenc->width * 4;
  else if (pngenc->bpp == 8)
    pngenc->stride = GST_ROUND_UP_4 (pngenc->width);
  else
    pngenc->stride = GST_ROUND_UP_4 (pngenc->width * 3);

  pcaps = gst_caps_new_simple ("image/png",
      "width", G_TYPE_INT, pngenc->width,
      "height", G_TYPE_INT, pngenc->height, NULL);
  structure = gst_caps_get_structure (pcaps, 0);
  gst_structure_set_value (structure, "framerate", fps);

  ret = gst_pad_set_caps (pngenc->srcpad, pcaps);

  gst_caps_unref (pcaps);
  gst_object_unref (pngenc);

  return ret;
}
Code example #18
File: video.c Project: genesi/gst-base-plugins
static void
paint_setup_YVU9 (paintinfo * p, unsigned char *dest)
{
  int h = GST_ROUND_UP_4 (p->height);

  p->yp = dest;
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->vp = p->yp + p->ystride * GST_ROUND_UP_4 (p->height);
  p->vstride = GST_ROUND_UP_4 (p->ystride / 4);
  p->up = p->vp + p->vstride * GST_ROUND_UP_4 (h / 4);
  p->ustride = GST_ROUND_UP_4 (p->ystride / 4);
  p->endptr = p->up + p->ustride * GST_ROUND_UP_4 (h / 4);
}
Code example #19
File: mediaplayer.cpp Project: renielcanlas/spivak
void MediaPlayer::drawVideoFrame(QPainter &p, const QRect &rect)
{
    QMutexLocker m( &m_lastVideoSampleMutex );

    if ( !m_lastVideoSample )
        return;

    // get the snapshot buffer format now. We set the caps on the appsink so
    // that it can only be an rgb buffer.
    GstCaps *caps = gst_sample_get_caps( m_lastVideoSample );

    if ( !caps )
    {
        reportError( "could not get caps for the new video sample" );
        return;
    }

    GstStructure * structure = gst_caps_get_structure( caps, 0 );

    // We need to get the final caps on the buffer to get the size
    int width = 0;
    int height = 0;

    gst_structure_get_int( structure, "width", &width );
    gst_structure_get_int( structure, "height", &height );

    if ( !width || !height )
    {
        reportError( "could not get video height and width" );
        return;
    }

    // Create pixmap from buffer and save, gstreamer video buffers have a stride that
    // is rounded up to the nearest multiple of 4
    GstBuffer *buffer = gst_sample_get_buffer( m_lastVideoSample );
    GstMapInfo map;

    if ( !gst_buffer_map( buffer, &map, GST_MAP_READ ) )
    {
        reportError( "could not map video buffer" );
        return;
    }

    p.drawImage( rect, QImage( map.data, width, height, GST_ROUND_UP_4 (width * 4), QImage::Format_RGB32 ), QRect( 0, 0, width, height ) );

    // And clean up
    gst_buffer_unmap( buffer, &map );
}
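
Since width * 4 is already a multiple of four, the GST_ROUND_UP_4 (width * 4) in the drawImage call is a no-op; it documents the stride contract rather than changing the value. The same holds for the GST_ROUND_UP_4 (4 * priv->width) calls in the clutter_gst uploads of examples #21 and #24 below.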
Code example #20
static void
gst_audio_cheb_band_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAudioChebBand *filter = GST_AUDIO_CHEB_BAND (object);

  switch (prop_id) {
    case PROP_MODE:
      g_mutex_lock (filter->lock);
      filter->mode = g_value_get_enum (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_TYPE:
      g_mutex_lock (filter->lock);
      filter->type = g_value_get_int (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_LOWER_FREQUENCY:
      g_mutex_lock (filter->lock);
      filter->lower_frequency = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_UPPER_FREQUENCY:
      g_mutex_lock (filter->lock);
      filter->upper_frequency = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_RIPPLE:
      g_mutex_lock (filter->lock);
      filter->ripple = g_value_get_float (value);
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    case PROP_POLES:
      g_mutex_lock (filter->lock);
      filter->poles = GST_ROUND_UP_4 (g_value_get_int (value));
      generate_coefficients (filter);
      g_mutex_unlock (filter->lock);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Code example #21
static void
clutter_gst_ayuv_upload (ClutterGstVideoSink *sink,
                         GstBuffer           *buffer)
{
  ClutterGstVideoSinkPrivate *priv= sink->priv;

  clutter_texture_set_from_rgb_data (priv->texture,
                                     GST_BUFFER_DATA (buffer),
                                     TRUE,
                                     priv->width,
                                     priv->height,
                                     GST_ROUND_UP_4 (4 * priv->width),
                                     4,
                                     0,
                                     NULL);
}
Code example #22
static void
gst_jpegenc_term_destination (j_compress_ptr cinfo)
{
  GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
  GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");

  /* Trim the buffer size and push it. */
  GST_BUFFER_SIZE (jpegenc->output_buffer) =
      GST_ROUND_UP_4 (GST_BUFFER_SIZE (jpegenc->output_buffer) -
      jpegenc->jdest.free_in_buffer);

  g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0);

  jpegenc->last_ret = gst_pad_push (jpegenc->srcpad, jpegenc->output_buffer);
  jpegenc->output_buffer = NULL;
}
Code example #23
static gboolean
gst_visual_src_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstStructure *structure;
  gint depth, pitch;

  structure = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (visual, "src pad got caps %" GST_PTR_FORMAT, caps);

  if (!gst_structure_get_int (structure, "width", &visual->width))
    goto error;
  if (!gst_structure_get_int (structure, "height", &visual->height))
    goto error;
  if (!gst_structure_get_int (structure, "bpp", &depth))
    goto error;
  if (!gst_structure_get_fraction (structure, "framerate", &visual->fps_n,
          &visual->fps_d))
    goto error;

  visual_video_set_depth (visual->video,
      visual_video_depth_enum_from_value (depth));
  visual_video_set_dimension (visual->video, visual->width, visual->height);
  pitch = GST_ROUND_UP_4 (visual->width * visual->video->bpp);
  visual_video_set_pitch (visual->video, pitch);
  visual_actor_video_negotiate (visual->actor, 0, FALSE, FALSE);

  /* precalc some values */
  visual->outsize = visual->video->height * pitch;
  visual->spf =
      gst_util_uint64_scale_int (visual->rate, visual->fps_d, visual->fps_n);
  visual->duration =
      gst_util_uint64_scale_int (GST_SECOND, visual->fps_d, visual->fps_n);

  gst_object_unref (visual);
  return TRUE;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (visual, "error parsing caps");
    gst_object_unref (visual);
    return FALSE;
  }
}
Code example #24
static void
clutter_gst_rgb32_upload (ClutterGstVideoSink *sink,
                          GstBuffer           *buffer)
{
  ClutterGstVideoSinkPrivate *priv= sink->priv;

  clutter_texture_set_from_rgb_data (priv->texture,
                                     GST_BUFFER_DATA (buffer),
                                     TRUE,
                                     priv->width,
                                     priv->height,
                                     GST_ROUND_UP_4 (4 * priv->width),
                                     4,
                                     priv->bgr ?
                                     CLUTTER_TEXTURE_RGB_FLAG_BGR : 0,
                                     NULL);
}
Code example #25
/**
 * gst_video_format_get_row_stride:
 * @format: a #GstVideoFormat
 * @component: the component index
 * @width: the width of video
 *
 * Calculates the row stride (number of bytes from one row of pixels to
 * the next) for the video component with an index of @component.  For
 * YUV video, Y, U, and V have component indices of 0, 1, and 2,
 * respectively.  For RGB video, R, G, and B have component indices of
 * 0, 1, and 2, respectively.  Alpha channels, if present, have a component
 * index of 3.  The @width parameter always represents the width of the
 * video, not the component.
 *
 * Since: 0.10.16
 *
 * Returns: row stride of component @component
 */
int
gst_video_format_get_row_stride (GstVideoFormat format, int component,
    int width)
{
  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, 0);
  g_return_val_if_fail (component >= 0 && component <= 3, 0);
  g_return_val_if_fail (width > 0, 0);

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      }
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      return GST_ROUND_UP_4 (width * 2);
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      return width * 4;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      return GST_ROUND_UP_4 (width * 3);
    case GST_VIDEO_FORMAT_Y41B:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_8 (width) / 4;
      }
    case GST_VIDEO_FORMAT_Y42B:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_8 (width) / 2;
      }
    default:
      return 0;
  }
}
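
The doc comment is easy to check against an awkward width: at width 17, an I420 frame gets a luma stride of GST_ROUND_UP_4 (17) = 20 and a chroma stride of GST_ROUND_UP_4 (GST_ROUND_UP_2 (17) / 2) = GST_ROUND_UP_4 (9) = 12.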
Code example #26

int
gst_video_format_get_row_stride (GstVideoFormat format, int component,
    int width)
{
  switch (format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      }
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      return GST_ROUND_UP_4 (width * 2);
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      return width * 4;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      return GST_ROUND_UP_4 (width * 3);
    case GST_VIDEO_FORMAT_Y41B:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_8 (width) / 4;
      }
    case GST_VIDEO_FORMAT_Y42B:
      if (component == 0) {
        return GST_ROUND_UP_4 (width);
      } else {
        return GST_ROUND_UP_8 (width) / 2;
      }
    default:
      return 0;
  }
}
Code example #27
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  pngdec = GST_PNGDEC (png_ptr->io_ptr);

  /* FIXME: implement interlaced pictures */

  /* If buffer_out doesn't exist, it means buffer_alloc failed, which 
   * will already have set the return code */
  if (GST_IS_BUFFER (pngdec->buffer_out)) {
    size_t offset = row_num * GST_ROUND_UP_4 (pngdec->rowbytes);

    GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
        (guint) row_num, pngdec->buffer_out, offset);
    memcpy (GST_BUFFER_DATA (pngdec->buffer_out) + offset, new_row,
        pngdec->rowbytes);
    pngdec->ret = GST_FLOW_OK;
  }
}
Code example #28
GstMemory *
my_vidmem_alloc (guint format, guint width, guint height)
{
  MyVidmem *mem;
  gsize maxsize;

  GST_DEBUG ("alloc frame format %u %ux%u", format, width, height);

  maxsize = (GST_ROUND_UP_4 (width) * height);

  mem = g_slice_new (MyVidmem);

  gst_memory_init (GST_MEMORY_CAST (mem), 0, _my_allocator, NULL,
      maxsize, 31, 0, maxsize);

  mem->format = format;
  mem->width = width;
  mem->height = height;
  mem->data = NULL;

  return (GstMemory *) mem;
}
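
Here the macro sizes a single 8-bit plane: GST_ROUND_UP_4 (width) * height is effectively one byte per pixel with rows padded to 4 bytes, and the 31 passed to gst_memory_init is an alignment mask, i.e. a request for 32-byte-aligned memory.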
Code example #29
/**
 * gst_video_format_get_size:
 * @format: a #GstVideoFormat
 * @width: the width of video
 * @height: the height of video
 *
 * Calculates the total number of bytes in the raw video format.  This
 * number should be used when allocating a buffer for raw video.
 *
 * Since: 0.10.16
 *
 * Returns: size (in bytes) of raw video format
 */
int
gst_video_format_get_size (GstVideoFormat format, int width, int height)
{
  int size;

  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, 0);
  g_return_val_if_fail (width > 0 && height > 0, 0);

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      size = GST_ROUND_UP_4 (width) * GST_ROUND_UP_2 (height);
      size += GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2) *
          (GST_ROUND_UP_2 (height) / 2) * 2;
      return size;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      return GST_ROUND_UP_4 (width * 2) * height;
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      return width * 4 * height;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      return GST_ROUND_UP_4 (width * 3) * height;
    case GST_VIDEO_FORMAT_Y41B:
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/4)*h */
      return (GST_ROUND_UP_4 (width) + (GST_ROUND_UP_8 (width) / 2)) * height;
    case GST_VIDEO_FORMAT_Y42B:
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h: */
      return (GST_ROUND_UP_4 (width) + GST_ROUND_UP_8 (width)) * height;
    default:
      return 0;
  }
}
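
Tracing the I420 branch for a 17x11 frame: the luma plane contributes GST_ROUND_UP_4 (17) * GST_ROUND_UP_2 (11) = 20 * 12 = 240 bytes, and the two chroma planes contribute GST_ROUND_UP_4 (GST_ROUND_UP_2 (17) / 2) * (GST_ROUND_UP_2 (11) / 2) * 2 = 12 * 6 * 2 = 144 bytes, for 384 bytes in total.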
Code example #30
void QGstreamerGLTextureRenderer::setFallbackBuffer(GstBuffer *buffer)
{
#ifdef GL_TEXTURE_SINK_DEBUG
    qDebug() << Q_FUNC_INFO << buffer;
#endif
    m_fallbackImage = QImage();

    if (!buffer)
        return;

    GstCaps *caps = GST_BUFFER_CAPS(buffer);
    const uchar *data = GST_BUFFER_DATA(buffer);

    if (!(caps && data))
        return;

    const GstStructure *structure = gst_caps_get_structure(caps, 0);
    guint32 fourcc;
    gst_structure_get_fourcc(structure, "format", &fourcc);

    if (fourcc != GST_MAKE_FOURCC('Y', 'U', 'Y', '2') &&
        fourcc != GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'))
        return;

    QSize resolution = QGstUtils::capsResolution(caps);
    m_fallbackImage = QImage(resolution, QImage::Format_RGB32);

    for (int y=0; y<resolution.height(); y++) {
        // GStreamer pads YUY2/UYVY rows to GST_ROUND_UP_4(width * 2) bytes
        // (see gst_video_format_get_row_stride above), so step by that stride
        const uchar *src = data + y*GST_ROUND_UP_4(resolution.width()*2);
        quint32 *dst = (quint32 *)m_fallbackImage.scanLine(y);

        int y1, y2;
        int u, v;

        int r1, g1, b1;
        int r2, g2, b2;

        int r, g, b;

        for (int x=0; x<resolution.width(); x+=2) {
            if (fourcc == GST_MAKE_FOURCC('Y', 'U', 'Y', '2')) {
                y1 = *src; src++;
                u  = *src; src++;
                y2 = *src; src++;
                v  = *src; src++;
            } else {
                u  = *src; src++;
                y1 = *src; src++;
                v  = *src; src++;
                y2 = *src; src++;
            }

            y1 = 298*y1/256;
            y2 = 298*y2/256;

            r = 408*v/256 - 223;
            g = - 100*u/256 - 208*v/256 + 136;
            b = 516*u/256 - 276;

            r1 = qBound(0, y1 + r, 255);
            g1 = qBound(0, y1 + g, 255);
            b1 = qBound(0, y1 + b, 255);

            *dst = qRgb(r1, g1, b1); dst++;

            r2 = qBound(0, y2 + r, 255);
            g2 = qBound(0, y2 + g, 255);
            b2 = qBound(0, y2 + b, 255);

            *dst = qRgb(r2, g2, b2); dst++;
        }
    }
}
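
The integer constants in the inner loop are the usual fixed-point BT.601 conversion: 298/256 ≈ 1.164 scales luma, while 408/256 ≈ 1.596 and 516/256 ≈ 2.016 scale the chroma terms, and the offsets -223 and -276 fold together the 16 luma and 128 chroma biases (e.g. 408 * 128 / 256 + 298 * 16 / 256 ≈ 204 + 19 = 223).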