Example #1
static GstFlowReturn
gst_ffmpegscale_transform (GstBaseTransform * trans, GstBuffer * inbuf,
                           GstBuffer * outbuf)
{
    GstFFMpegScale *scale = GST_FFMPEGSCALE (trans);
    GstVideoFrame in_frame, out_frame;

    if (!gst_video_frame_map (&in_frame, &scale->in_info, inbuf, GST_MAP_READ))
        goto invalid_buffer;

    if (!gst_video_frame_map (&out_frame, &scale->out_info, outbuf,
                              GST_MAP_WRITE)) {
        gst_video_frame_unmap (&in_frame);
        goto invalid_buffer;
    }

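    /* scale straight from the mapped input planes into the mapped output
     * planes, using the strides resolved by each mapping */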
    sws_scale (scale->ctx, (const guint8 **) in_frame.data, in_frame.info.stride,
               0, scale->in_info.height, (guint8 **) out_frame.data,
               out_frame.info.stride);

    gst_video_frame_unmap (&in_frame);
    gst_video_frame_unmap (&out_frame);

    return GST_FLOW_OK;

    /* ERRORS */
invalid_buffer:
    {
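        /* skip buffers we cannot map instead of erroring out, matching
         * the GstVideoFilter convention for invalid buffers */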
        return GST_FLOW_OK;
    }
}
Example #2
static GstFlowReturn
gst_video_filter_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstFlowReturn res;
  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
  GstVideoFilterClass *fclass;

  if (G_UNLIKELY (!filter->negotiated))
    goto unknown_format;

  fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
  if (fclass->transform_frame) {
    GstVideoFrame in_frame, out_frame;

    if (!gst_video_frame_map (&in_frame, &filter->in_info, inbuf, GST_MAP_READ))
      goto invalid_buffer;

    if (!gst_video_frame_map (&out_frame, &filter->out_info, outbuf,
            GST_MAP_WRITE)) {
      gst_video_frame_unmap (&in_frame);
      goto invalid_buffer;
    }

    /* GstVideoFrame has another reference, so the buffer looks unwritable,
     * meaning that we can't attach any metas or anything to it. Other
     * map() functions like gst_buffer_map() don't get another reference
     * of the buffer and expect the buffer reference to be kept until
     * the buffer is unmapped again. */
    gst_buffer_unref (inbuf);
    gst_buffer_unref (outbuf);
    res = fclass->transform_frame (filter, &in_frame, &out_frame);
    gst_buffer_ref (inbuf);
    gst_buffer_ref (outbuf);

    gst_video_frame_unmap (&out_frame);
    gst_video_frame_unmap (&in_frame);
  } else {
    GST_DEBUG_OBJECT (trans, "no transform_frame vmethod");
    res = GST_FLOW_OK;
  }

  return res;

  /* ERRORS */
unknown_format:
  {
    GST_ELEMENT_ERROR (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("unknown format"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
invalid_buffer:
  {
    GST_ELEMENT_WARNING (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("invalid video buffer received"));
    return GST_FLOW_OK;
  }
}
Example #3
static void
copy_field (GstInterlace * interlace, GstBuffer * dest, GstBuffer * src,
    int field_index)
{
  GstVideoInfo *info = &interlace->info;
  gint i, j, n_planes;
  guint8 *d, *s;
  GstVideoFrame dframe, sframe;

  if (!gst_video_frame_map (&dframe, info, dest, GST_MAP_WRITE))
    goto dest_map_failed;

  if (!gst_video_frame_map (&sframe, info, src, GST_MAP_READ))
    goto src_map_failed;

  n_planes = GST_VIDEO_FRAME_N_PLANES (&dframe);

  for (i = 0; i < n_planes; i++) {
    gint cheight, cwidth;
    gint ss, ds;

    d = GST_VIDEO_FRAME_PLANE_DATA (&dframe, i);
    s = GST_VIDEO_FRAME_PLANE_DATA (&sframe, i);

    ds = GST_VIDEO_FRAME_PLANE_STRIDE (&dframe, i);
    ss = GST_VIDEO_FRAME_PLANE_STRIDE (&sframe, i);

    d += field_index * ds;
    s += field_index * ss;

    cheight = GST_VIDEO_FRAME_COMP_HEIGHT (&dframe, i);
    cwidth = MIN (ABS (ss), ABS (ds));

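    /* copy every second row, starting at the requested field:
     * field_index 0 is the top field, 1 the bottom field */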
    for (j = field_index; j < cheight; j += 2) {
      memcpy (d, s, cwidth);
      d += ds * 2;
      s += ss * 2;
    }
  }

  gst_video_frame_unmap (&dframe);
  gst_video_frame_unmap (&sframe);
  return;

dest_map_failed:
  {
    GST_ERROR_OBJECT (interlace, "failed to map dest");
    return;
  }
src_map_failed:
  {
    GST_ERROR_OBJECT (interlace, "failed to map src");
    gst_video_frame_unmap (&dframe);
    return;
  }
}
Example #4
static void
gst_gl_composition_overlay_upload (GstGLCompositionOverlay * overlay,
    GstBuffer * buf)
{
  GstGLMemory *comp_gl_memory = NULL;
  GstBuffer *comp_buffer = NULL;
  GstBuffer *overlay_buffer = NULL;
  GstVideoInfo vinfo;
  GstVideoMeta *vmeta;
  GstVideoFrame *comp_frame;
  GstVideoFrame gl_frame;

  comp_buffer =
      gst_video_overlay_rectangle_get_pixels_unscaled_argb (overlay->rectangle,
      GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);

  comp_frame = g_slice_new (GstVideoFrame);

  vmeta = gst_buffer_get_video_meta (comp_buffer);
  gst_video_info_set_format (&vinfo, vmeta->format, vmeta->width,
      vmeta->height);
  vinfo.stride[0] = vmeta->stride[0];
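  /* use the stride from the buffer's GstVideoMeta: overlay rectangles are
   * not guaranteed to use the default stride for their width */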

  if (gst_video_frame_map (comp_frame, &vinfo, comp_buffer, GST_MAP_READ)) {
    gst_gl_composition_overlay_add_transformation (overlay, buf);

    comp_gl_memory =
        gst_gl_memory_wrapped (overlay->context, GST_GL_TEXTURE_TARGET_2D,
        &comp_frame->info, 0, NULL, comp_frame->data[0], comp_frame,
        _video_frame_unmap_and_free);

    overlay_buffer = gst_buffer_new ();
    gst_buffer_append_memory (overlay_buffer, (GstMemory *) comp_gl_memory);

    if (!gst_video_frame_map (&gl_frame, &comp_frame->info, overlay_buffer,
            GST_MAP_READ | GST_MAP_GL)) {
      gst_buffer_unref (overlay_buffer);
      _video_frame_unmap_and_free (comp_frame);
      GST_WARNING_OBJECT (overlay, "Cannot upload overlay texture");
      return;
    }

    gst_memory_ref ((GstMemory *) comp_gl_memory);
    overlay->gl_memory = comp_gl_memory;
    overlay->texture_id = comp_gl_memory->tex_id;

    gst_buffer_unref (overlay_buffer);
    gst_video_frame_unmap (&gl_frame);

    GST_DEBUG ("uploaded overlay texture %d", overlay->texture_id);
  } else {
    g_slice_free (GstVideoFrame, comp_frame);
  }
}
Example #5
static GstFlowReturn
gst_v4l2_buffer_pool_copy_buffer (GstV4l2BufferPool * pool, GstBuffer * dest,
    GstBuffer * src)
{
  const GstVideoFormatInfo *finfo = pool->caps_info.finfo;

  GST_LOG_OBJECT (pool, "copying buffer");

  if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN &&
          finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
    GstVideoFrame src_frame, dest_frame;

    GST_DEBUG_OBJECT (pool, "copy video frame");

    /* we have raw video, use videoframe copy to get strides right */
    if (!gst_video_frame_map (&src_frame, &pool->caps_info, src, GST_MAP_READ))
      goto invalid_buffer;

    if (!gst_video_frame_map (&dest_frame, &pool->caps_info, dest,
            GST_MAP_WRITE)) {
      gst_video_frame_unmap (&src_frame);
      goto invalid_buffer;
    }

    gst_video_frame_copy (&dest_frame, &src_frame);

    gst_video_frame_unmap (&src_frame);
    gst_video_frame_unmap (&dest_frame);
  } else {
    GstMapInfo map;

    GST_DEBUG_OBJECT (pool, "copy raw bytes");

    if (!gst_buffer_map (src, &map, GST_MAP_READ))
      goto invalid_buffer;

    gst_buffer_fill (dest, 0, map.data, gst_buffer_get_size (src));

    gst_buffer_unmap (src, &map);
    gst_buffer_resize (dest, 0, gst_buffer_get_size (src));
  }

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, pool, "slow copy into buffer %p",
      dest);

  return GST_FLOW_OK;

invalid_buffer:
  {
    GST_ERROR_OBJECT (pool, "could not map buffer");
    return GST_FLOW_ERROR;
  }
}
Example #6
static MsdkSurface *
get_surface (GstMsdkDec * thiz, GstBuffer * buffer)
{
  MsdkSurface *i;

  i = g_slice_new0 (MsdkSurface);

  if (gst_msdk_is_msdk_buffer (buffer)) {
    i->surface = gst_msdk_get_surface_from_buffer (buffer);
    i->buf = buffer;
  } else {
    /* Confirm to activate the side pool */
    if (!gst_buffer_pool_is_active (thiz->pool) &&
        !gst_buffer_pool_set_active (thiz->pool, TRUE)) {
      g_slice_free (MsdkSurface, i);
      return NULL;
    }

    if (!gst_video_frame_map (&i->copy, &thiz->non_msdk_pool_info, buffer,
            GST_MAP_WRITE))
      goto failed_unref_buffer;

    if (gst_buffer_pool_acquire_buffer (thiz->pool, &buffer,
            NULL) != GST_FLOW_OK)
      goto failed_unmap_copy;

    i->surface = gst_msdk_get_surface_from_buffer (buffer);
    i->buf = buffer;

    if (!gst_video_frame_map (&i->data, &thiz->output_info, buffer,
            GST_MAP_READWRITE))
      goto failed_unref_buffer2;
  }

  thiz->decoded_msdk_surfaces = g_list_append (thiz->decoded_msdk_surfaces, i);
  return i;

failed_unref_buffer2:
  gst_buffer_unref (buffer);
  /* point back at the original input buffer (still mapped in i->copy) so
   * the shared unwind path below releases the right one */
  buffer = i->copy.buffer;
failed_unmap_copy:
  gst_video_frame_unmap (&i->copy);
failed_unref_buffer:
  gst_buffer_unref (buffer);
  g_slice_free (MsdkSurface, i);

  GST_ERROR_OBJECT (thiz, "failed to handle buffer");
  return NULL;
}
Example #7
static void gub_copy_texture_d3d11(GUBGraphicContextD3D11 *gcontext, GstVideoInfo *video_info, GstBuffer *buffer, void *native_texture_ptr)
{
    GUBGraphicDeviceD3D11* gdevice = (GUBGraphicDeviceD3D11*)gub_graphic_device;
    ID3D11DeviceContext* ctx = NULL;
    gdevice->d3d11device->lpVtbl->GetImmediateContext(gdevice->d3d11device, &ctx);
    if (!gcontext->crop_setup) {
        GstElement *crop;
        crop = gst_bin_get_by_name(GST_BIN(gcontext->pipeline), "crop");
        if (crop) {
            g_object_set(crop,
                "left", (int)(video_info->width *  gcontext->crop_left),
                "top", (int)(video_info->height * gcontext->crop_top),
                "right", (int)(video_info->width *  gcontext->crop_right),
                "bottom", (int)(video_info->height * gcontext->crop_bottom),
                NULL);
            gst_object_unref(crop);
        }
        gcontext->crop_setup = TRUE;
    }
    // update native texture from code
    if (native_texture_ptr) {
        GstVideoFrame video_frame;
        if (gst_video_frame_map(&video_frame, video_info, buffer, GST_MAP_READ)) {
            // source pitch assumes a tightly packed 4-bytes-per-pixel frame
            ctx->lpVtbl->UpdateSubresource(ctx, (ID3D11Resource *)native_texture_ptr, 0, NULL, GST_VIDEO_FRAME_PLANE_DATA(&video_frame, 0), video_info->width * 4, 0);
            gst_video_frame_unmap(&video_frame);
        }
    }
    ctx->lpVtbl->Release(ctx);
}
Example #8
static void
add_field (GstIvtc * ivtc, GstBuffer * buffer, int parity, int index)
{
    int i = ivtc->n_fields;
    GstClockTime ts;
    GstIvtcField *field = &ivtc->fields[i];

    g_return_if_fail (i < GST_IVTC_MAX_FIELDS);

    ts = GST_BUFFER_PTS (buffer) + index * ivtc->field_duration;
    if (ts + ivtc->field_duration < ivtc->segment.start) {
        /* drop, it's before our segment */
        return;
    }

    GST_DEBUG ("adding field %d", i);

    field->buffer = gst_buffer_ref (buffer);
    field->parity = parity;
    field->ts = ts;

    if (!gst_video_frame_map (&ivtc->fields[i].frame, &ivtc->sink_video_info,
                              buffer, GST_MAP_READ)) {
        GST_ERROR ("failed to map field buffer");
        gst_buffer_unref (field->buffer);
        return;
    }

    ivtc->n_fields++;
}
Example #9
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));

  /* If buffer_out doesn't exist, it means buffer_alloc failed, which 
   * will already have set the return code */
  if (new_row && GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
    GstVideoFrame frame;
    GstBuffer *buffer = pngdec->current_frame->output_buffer;
    size_t offset;
    guint8 *data;

    if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
            GST_MAP_WRITE)) {
      pngdec->ret = GST_FLOW_ERROR;
      return;
    }

    data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
    offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
    GST_LOG ("got row %u at pass %d, copying in buffer %p at offset %"
        G_GSIZE_FORMAT, (guint) row_num, pass,
        pngdec->current_frame->output_buffer, offset);
    png_progressive_combine_row (pngdec->png, data + offset, new_row);
    gst_video_frame_unmap (&frame);
    pngdec->ret = GST_FLOW_OK;
  } else
    pngdec->ret = GST_FLOW_OK;
}
Example #10
static gboolean
frame_contains_subtitles (GstBuffer * buff)
{
  GstVideoFrame frame;
  guint x, y, first_sub_pix_x = 0, first_sub_pix_y = 0, last_sub_y = 0;
  gint stride;
  guint8 *data;

  if (!gst_video_frame_map (&frame, &glob_video_info, buff, GST_MAP_READ))
    return FALSE;

  data = frame.data[0];
  /* use the mapped stride instead of assuming a 1920-pixel-wide frame */
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (&frame, 0);

  for (y = 0; y < GST_VIDEO_INFO_HEIGHT (&glob_video_info); y++) {
    for (x = 0; x < GST_VIDEO_INFO_WIDTH (&glob_video_info) * 3; x += 3) {
      if ((data[x + y * stride] != 0x00) ||
          (data[x + y * stride + 1] != 0x00) ||
          (data[x + y * stride + 2] != 0x00)) {

        if (first_sub_pix_x == 0) {
          first_sub_pix_x = x / 3;
          first_sub_pix_y = y;
        }

        last_sub_y = y;
      }
    }
  }

  gst_video_frame_unmap (&frame);

  if (last_sub_y != 0 && last_sub_y != first_sub_pix_y)
    return TRUE;

  return FALSE;
}
Example #11
static gboolean
gst_validate_ssim_convert (GstValidateSsim * self, SSimConverterInfo * info,
    GstVideoFrame * frame, GstVideoFrame * converted_frame)
{
  gboolean res = TRUE;
  GstBuffer *outbuf = NULL;

  g_return_val_if_fail (info != NULL, FALSE);

  outbuf = gst_buffer_new_allocate (NULL, info->out_info.size, NULL);
  if (!gst_video_frame_map (converted_frame, &info->out_info, outbuf,
          GST_MAP_WRITE)) {
    GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
        "Could not map output converted_frame");
    goto fail;
  }

  gst_video_converter_frame (info->converter, frame, converted_frame);

done:
  if (outbuf)
    gst_buffer_unref (outbuf);

  return res;

fail:
  res = FALSE;
  goto done;
}
Example #12
static GstFlowReturn
openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
{
  openni::Status rc = openni::STATUS_OK;
  openni::VideoStream * pStream = src->depth;
  int changedStreamDummy;
  GstVideoFrame vframe;
  uint64_t oni_ts;

  /* Block until we get some data */
  rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
      SAMPLE_READ_WAIT_TIMEOUT);
  if (rc != openni::STATUS_OK) {
    GST_ERROR_OBJECT (src, "Frame read timeout: %s",
        openni::OpenNI::getExtendedError ());
    return GST_FLOW_ERROR;
  }

  if (src->depth->isValid () && src->color->isValid () &&
      src->sourcetype == SOURCETYPE_BOTH) {
    rc = src->depth->readFrame (src->depthFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }
    rc = src->color->readFrame (src->colorFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    /* Copy colour information */
    if (!gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE)) {
      GST_ERROR_OBJECT (src, "Failed to map output buffer");
      return GST_FLOW_ERROR;
    }

    guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint8 *pColor = (guint8 *) src->colorFrame->getData ();
    /* Add depth as 8bit alpha channel, depth is 16bit samples. */
    guint16 *pDepth = (guint16 *) src->depthFrame->getData ();

    for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
      for (int j = 0; j < src->colorFrame->getWidth (); ++j) {
        pData[4 * j + 0] = pColor[3 * j + 0];
        pData[4 * j + 1] = pColor[3 * j + 1];
        pData[4 * j + 2] = pColor[3 * j + 2];
        pData[4 * j + 3] = pDepth[j] >> 8;
      }
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
      pColor += src->colorFrame->getStrideInBytes ();
      pDepth += src->depthFrame->getStrideInBytes () / 2;
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->colorFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%d+%d)B",
        src->colorFrame->getDataSize (),
        src->depthFrame->getDataSize ());
  } else if (src->depth->isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
Example #13
static GdkPixbuf *
gst_gdk_pixbuf_sink_get_pixbuf_from_buffer (GstGdkPixbufSink * sink,
    GstBuffer * buf)
{
  GdkPixbuf *pix = NULL;
  GstVideoFrame *frame;
  gint minsize, bytes_per_pixel;

  g_return_val_if_fail (GST_VIDEO_SINK_WIDTH (sink) > 0, NULL);
  g_return_val_if_fail (GST_VIDEO_SINK_HEIGHT (sink) > 0, NULL);

  frame = g_slice_new0 (GstVideoFrame);
  if (!gst_video_frame_map (frame, &sink->info, buf, GST_MAP_READ)) {
    g_slice_free (GstVideoFrame, frame);
    return NULL;
  }

  bytes_per_pixel = (sink->has_alpha) ? 4 : 3;

  /* last row needn't have row padding */
  minsize = (GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) *
      (GST_VIDEO_SINK_HEIGHT (sink) - 1)) +
      (bytes_per_pixel * GST_VIDEO_SINK_WIDTH (sink));

  g_return_val_if_fail (gst_buffer_get_size (buf) >= minsize, NULL);

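  /* take an extra ref for the lifetime of the pixbuf; the destroy notify
   * below is expected to unmap the frame and drop this ref again */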
  gst_buffer_ref (buf);
  pix = gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (frame, 0),
      GDK_COLORSPACE_RGB, sink->has_alpha, 8, GST_VIDEO_SINK_WIDTH (sink),
      GST_VIDEO_SINK_HEIGHT (sink), GST_VIDEO_FRAME_COMP_STRIDE (frame, 0),
      (GdkPixbufDestroyNotify) gst_gdk_pixbuf_sink_pixbuf_destroy_notify,
      frame);

  return pix;
}
Example #14
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));

  /* FIXME: implement interlaced pictures */

  /* If buffer_out doesn't exist, it means buffer_alloc failed, which 
   * will already have set the return code */
  if (GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
    GstVideoFrame frame;
    GstBuffer *buffer = pngdec->current_frame->output_buffer;
    size_t offset;
    gint width;
    guint8 *data;

    if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
            GST_MAP_WRITE)) {
      pngdec->ret = GST_FLOW_ERROR;
      return;
    }

    data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
    offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
    GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
        (guint) row_num, pngdec->current_frame->output_buffer, offset);
    width = GST_ROUND_UP_4 (png_get_rowbytes (pngdec->png, pngdec->info));
    memcpy (data + offset, new_row, width);
    gst_video_frame_unmap (&frame);
    pngdec->ret = GST_FLOW_OK;
  }
}
Example #15
static CoglBool
cogl_gst_ayuv_upload (CoglGstVideoSink *sink,
                      GstBuffer *buffer)
{
  CoglGstVideoSinkPrivate *priv = sink->priv;
  CoglPixelFormat format = COGL_PIXEL_FORMAT_RGBA_8888;
  GstVideoFrame frame;

  if (!gst_video_frame_map (&frame, &priv->info, buffer, GST_MAP_READ))
    goto map_fail;

  clear_frame_textures (sink);

  priv->frame[0] = video_texture_new_from_data (priv->ctx, priv->info.width,
                                                priv->info.height,
                                                format, format,
                                                priv->info.stride[0],
                                                frame.data[0], NULL);

  gst_video_frame_unmap (&frame);

  return TRUE;

map_fail:
  {
    GST_ERROR_OBJECT (sink, "Could not map incoming video frame");
    return FALSE;
  }
}
Example #16
static GstFlowReturn
gst_cacasink_render (GstBaseSink * basesink, GstBuffer * buffer)
{
  GstCACASink *cacasink = GST_CACASINK (basesink);
  GstVideoFrame frame;

  GST_DEBUG ("render");

  if (!gst_video_frame_map (&frame, &cacasink->info, buffer, GST_MAP_READ))
    goto invalid_frame;

  caca_clear ();
  caca_draw_bitmap (0, 0, cacasink->screen_width - 1,
      cacasink->screen_height - 1, cacasink->bitmap,
      GST_VIDEO_FRAME_PLANE_DATA (&frame, 0));
  caca_refresh ();

  gst_video_frame_unmap (&frame);

  return GST_FLOW_OK;

  /* ERRORS */
invalid_frame:
  {
    GST_ERROR_OBJECT (cacasink, "invalid frame received");
    return GST_FLOW_ERROR;
  }
}
Example #17
static GstFlowReturn
gst_mpeg2dec_alloc_buffer (GstMpeg2dec * mpeg2dec, GstVideoCodecFrame * frame,
    GstBuffer ** buffer)
{
  GstFlowReturn ret;
  GstVideoFrame vframe;
  guint8 *buf[3];

  ret =
      gst_mpeg2dec_alloc_sized_buf (mpeg2dec, mpeg2dec->decoded_info.size,
      frame, buffer);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto beach;

  if (mpeg2dec->need_cropping && mpeg2dec->has_cropping) {
    GstVideoCropMeta *crop;
    GstVideoCodecState *state;
    GstVideoInfo *vinfo;

    state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));
    vinfo = &state->info;

    crop = gst_buffer_add_video_crop_meta (frame->output_buffer);
    /* we can do things slightly more efficient when we know that
     * downstream understands clipping */
    crop->x = 0;
    crop->y = 0;
    crop->width = vinfo->width;
    crop->height = vinfo->height;

    gst_video_codec_state_unref (state);
  }

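  /* map for both reading and writing: libmpeg2 decodes into these planes
   * and may read them back later as reference pictures */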
  if (!gst_video_frame_map (&vframe, &mpeg2dec->decoded_info, *buffer,
          GST_MAP_READ | GST_MAP_WRITE))
    goto map_fail;

  buf[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  buf[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
  buf[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

  GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, frame %i",
      buf[0], buf[1], buf[2], frame->system_frame_number);

  /* Note: We use a non-null 'id' value to make the distinction
   * between the dummy buffers (which have an id of NULL) and the
   * ones we did */
  mpeg2_set_buf (mpeg2dec->decoder, buf,
      GINT_TO_POINTER (frame->system_frame_number + 1));
  gst_mpeg2dec_save_buffer (mpeg2dec, frame->system_frame_number, &vframe);

beach:
  return ret;

map_fail:
  {
    GST_ERROR_OBJECT (mpeg2dec, "Failed to map frame");
    return GST_FLOW_ERROR;
  }
}
Example #18
static GstBuffer *
gst_core_media_buffer_new_from_buffer (GstBuffer * buf, GstVideoInfo * info)
{
  gboolean ret;
  GstBuffer *copy_buf;
  GstVideoFrame dest, src;
  GstAllocator *allocator;

  allocator = gst_allocator_find (GST_ALLOCATOR_SYSMEM);
  if (!allocator) {
    GST_ERROR ("Could not find SYSMEM allocator");
    return NULL;
  }

  copy_buf = gst_buffer_new_allocate (allocator, info->size, NULL);

  gst_object_unref (allocator);

  if (!gst_video_frame_map (&dest, info, copy_buf, GST_MAP_WRITE)) {
    GST_ERROR ("Could not map destination frame");
    goto error;
  }

  if (!gst_video_frame_map (&src, info, buf, GST_MAP_READ)) {
    GST_ERROR ("Could not map source frame");
    gst_video_frame_unmap (&dest);
    goto error;
  }

  ret = gst_video_frame_copy (&dest, &src);

  gst_video_frame_unmap (&dest);
  gst_video_frame_unmap (&src);

  if (!ret) {
    GST_ERROR ("Could not copy frame");
    goto error;
  }

  return copy_buf;

error:
  if (copy_buf) {
    gst_buffer_unref (copy_buf);
  }
  return NULL;
}
Example #19
static GstFlowReturn
gst_video_filter_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstFlowReturn res;
  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
  GstVideoFilterClass *fclass;

  if (G_UNLIKELY (!filter->negotiated))
    goto unknown_format;

  fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
  if (fclass->transform_frame) {
    GstVideoFrame in_frame, out_frame;

    if (!gst_video_frame_map (&in_frame, &filter->in_info, inbuf, GST_MAP_READ))
      goto invalid_buffer;

    if (!gst_video_frame_map (&out_frame, &filter->out_info, outbuf,
            GST_MAP_WRITE)) {
      gst_video_frame_unmap (&in_frame);
      goto invalid_buffer;
    }

    res = fclass->transform_frame (filter, &in_frame, &out_frame);

    gst_video_frame_unmap (&out_frame);
    gst_video_frame_unmap (&in_frame);
  } else {
    GST_DEBUG_OBJECT (trans, "no transform_frame vmethod");
    res = GST_FLOW_OK;
  }

  return res;

  /* ERRORS */
unknown_format:
  {
    GST_ELEMENT_ERROR (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("unknown format"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
invalid_buffer:
  {
    GST_ELEMENT_WARNING (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("invalid video buffer received"));
    return GST_FLOW_OK;
  }
}
Example #20
static GstFlowReturn
gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstVideoCodecFrame * in_frame,
    GstVideoFrame * input_vframe)
{
  GstVideoCodecState *state;
  GstVideoInfo *info;
  GstVideoInfo *dinfo;
  GstVideoFrame output_frame;
  GstFlowReturn ret;
  GstBuffer *buffer = NULL;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  info = &state->info;
  dinfo = &dec->decoded_info;

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
      "Copying input buffer %ux%u (%" G_GSIZE_FORMAT ") to output buffer "
      "%ux%u (%" G_GSIZE_FORMAT ")", dinfo->width, dinfo->height,
      dinfo->size, info->width, info->height, info->size);

  ret = gst_buffer_pool_acquire_buffer (dec->downstream_pool, &buffer, NULL);
  if (ret != GST_FLOW_OK)
    goto beach;

  if (!gst_video_frame_map (&output_frame, info, buffer, GST_MAP_WRITE))
    goto map_fail;

  if (in_frame->output_buffer)
    gst_buffer_unref (in_frame->output_buffer);
  in_frame->output_buffer = buffer;

  if (!gst_video_frame_copy (&output_frame, input_vframe))
    goto copy_failed;

  gst_video_frame_unmap (&output_frame);

  GST_BUFFER_FLAGS (in_frame->output_buffer) =
      GST_BUFFER_FLAGS (input_vframe->buffer);

beach:
  gst_video_codec_state_unref (state);

  return ret;

map_fail:
  {
    GST_ERROR_OBJECT (dec, "Failed to map output frame");
    gst_buffer_unref (buffer);
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }

copy_failed:
  {
    GST_ERROR_OBJECT (dec, "Failed to copy output frame");
    gst_video_frame_unmap (&output_frame);
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }
}
Example #21
static void gub_copy_texture_opengl(GUBGraphicContext *gcontext, GstVideoInfo *video_info, GstBuffer *buffer, void *native_texture_ptr)
{
    if (native_texture_ptr)
    {
        GLuint gltex = (GLuint)(size_t)(native_texture_ptr);
        GstVideoFrame video_frame;
        if (gst_video_frame_map(&video_frame, video_info, buffer, GST_MAP_READ)) {
            glBindTexture(GL_TEXTURE_2D, gltex);
            glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, video_info->width, video_info->height, GL_RGB, GL_UNSIGNED_BYTE, GST_VIDEO_FRAME_PLANE_DATA(&video_frame, 0));
            gst_video_frame_unmap(&video_frame);
        }
    }
}
Example #22
static GstFlowReturn
gst_yadif_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstYadif *yadif = GST_YADIF (trans);
  int parity;
  int tff;

  parity = 0;
  tff = 0;

  if (!gst_video_frame_map (&yadif->dest_frame, &yadif->video_info, outbuf,
          GST_MAP_WRITE))
    goto dest_map_failed;

  if (!gst_video_frame_map (&yadif->cur_frame, &yadif->video_info, inbuf,
          GST_MAP_READ))
    goto src_map_failed;

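  /* this simplified element deinterlaces each buffer in isolation, so the
   * previous and next frames simply alias the current one */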
  yadif->next_frame = yadif->cur_frame;
  yadif->prev_frame = yadif->cur_frame;

  yadif_filter (yadif, parity, tff);

  gst_video_frame_unmap (&yadif->dest_frame);
  gst_video_frame_unmap (&yadif->cur_frame);
  return GST_FLOW_OK;

dest_map_failed:
  {
    GST_ERROR_OBJECT (yadif, "failed to map dest");
    return GST_FLOW_ERROR;
  }
src_map_failed:
  {
    GST_ERROR_OBJECT (yadif, "failed to map src");
    gst_video_frame_unmap (&yadif->dest_frame);
    return GST_FLOW_ERROR;
  }
}
Example #23
/* Allocate buffer and copy image data into Y444 format */
static GstFlowReturn
daala_handle_image (GstDaalaDec * dec, od_img * img, GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
  gint width, height, stride;
  GstFlowReturn result;
  gint i, comp;
  guint8 *dest, *src;
  GstVideoFrame vframe;

  result = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (G_UNLIKELY (result != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
        gst_flow_get_name (result));
    return result;
  }

  /* if only libdaala would allow us to give it a destination frame */
  GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
      "doing unavoidable video frame copy");

  if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
              frame->output_buffer, GST_MAP_WRITE)))
    goto invalid_frame;

  for (comp = 0; comp < 3; comp++) {
    width = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, comp);
    height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, comp);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
    dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);

    src = img->planes[comp].data;

    for (i = 0; i < height; i++) {
      memcpy (dest, src, width);

      dest += stride;
      src += img->planes[comp].ystride;
    }
  }
  gst_video_frame_unmap (&vframe);

  return GST_FLOW_OK;
invalid_frame:
  {
    GST_DEBUG_OBJECT (dec, "could not map video frame");
    return GST_FLOW_ERROR;
  }
}
Example #24
static GstVTEncFrame *
gst_vtenc_frame_new (GstBuffer * buf, GstVideoInfo * video_info)
{
  GstVTEncFrame *frame;

  frame = g_slice_new (GstVTEncFrame);
  frame->buf = gst_buffer_ref (buf);
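  /* keep a buffer reference for as long as the frame stays mapped */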
  if (!gst_video_frame_map (&frame->videoframe, video_info, buf, GST_MAP_READ)) {
    gst_buffer_unref (frame->buf);
    g_slice_free (GstVTEncFrame, frame);
    return NULL;
  }

  return frame;
}
Example #25
static gboolean
_upload_memory (GstGLUpload * upload)
{
    guint in_width, in_height;
    guint in_texture[GST_VIDEO_MAX_PLANES];
    GstBuffer *inbuf;
    GstVideoFrame out_frame;
    GstVideoInfo out_info;
    gint i;

    in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
    in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

    if (!upload->initted) {
        if (!_init_upload (upload)) {
            return FALSE;
        }
    }

    inbuf = gst_buffer_new ();
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
        in_texture[i] = upload->in_tex[i]->tex_id;
        gst_buffer_append_memory (inbuf,
                                  gst_memory_ref ((GstMemory *) upload->in_tex[i]));
    }

    GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
               in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

    upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
    gst_buffer_unref (inbuf);

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
                               in_height);
    if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
                              GST_MAP_READ | GST_MAP_GL)) {
        gst_buffer_unref (upload->priv->outbuf);
        upload->priv->outbuf = NULL;
        return FALSE;
    }

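    /* with GST_MAP_GL, the mapped plane data is the GL texture handle
     * rather than system-memory pixels */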
    upload->out_tex->tex_id = *(guint *) out_frame.data[0];

    gst_video_frame_unmap (&out_frame);
    upload->priv->released = FALSE;

    return TRUE;
}
Example #26
static GstFlowReturn
gst_av1_enc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  GstAV1Enc *av1enc = GST_AV1_ENC_CAST (encoder);
  aom_image_t raw;
  int flags = 0;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoFrame vframe;

  if (!aom_img_alloc (&raw, AOM_IMG_FMT_I420, av1enc->aom_cfg.g_w,
          av1enc->aom_cfg.g_h, 1)) {
    GST_ERROR_OBJECT (encoder, "Failed to allocate input image");
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &av1enc->input_state->info,
          frame->input_buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
    aom_img_free (&raw);
    return GST_FLOW_ERROR;
  }
  gst_av1_enc_fill_image (av1enc, &vframe, &raw);
  gst_video_frame_unmap (&vframe);

  if (av1enc->keyframe_dist >= 30) {
    av1enc->keyframe_dist = 0;
    flags |= AOM_EFLAG_FORCE_KF;
  }
  av1enc->keyframe_dist++;

  g_mutex_lock (&av1enc->encoder_lock);
  if (aom_codec_encode (&av1enc->encoder, &raw, frame->pts, 1, flags)
      != AOM_CODEC_OK) {
    gst_av1_codec_error (&av1enc->encoder, "Failed to encode frame");
    ret = GST_FLOW_ERROR;
  }
  g_mutex_unlock (&av1enc->encoder_lock);

  aom_img_free (&raw);
  gst_video_codec_frame_unref (frame);

  if (ret == GST_FLOW_ERROR)
    return ret;

  ret = gst_av1_enc_process (av1enc);

  if (ret == GST_FLOW_CUSTOM_SUCCESS)
    ret = GST_FLOW_OK;

  return ret;
}
Example #27
static GstFlowReturn
gst_video_filter_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn res;
  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
  GstVideoFilterClass *fclass;

  if (G_UNLIKELY (!filter->negotiated))
    goto unknown_format;

  fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
  if (fclass->transform_frame_ip) {
    GstVideoFrame frame;
    GstMapFlags flags;

    flags = GST_MAP_READ;

    if (!gst_base_transform_is_passthrough (trans))
      flags |= GST_MAP_WRITE;

    if (!gst_video_frame_map (&frame, &filter->in_info, buf, flags))
      goto invalid_buffer;

    res = fclass->transform_frame_ip (filter, &frame);

    gst_video_frame_unmap (&frame);
  } else {
    GST_DEBUG_OBJECT (trans, "no transform_frame_ip vmethod");
    res = GST_FLOW_OK;
  }

  return res;

  /* ERRORS */
unknown_format:
  {
    GST_ELEMENT_ERROR (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("unknown format"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
invalid_buffer:
  {
    GST_ELEMENT_WARNING (filter, CORE, NOT_IMPLEMENTED, (NULL),
        ("invalid video buffer received"));
    return GST_FLOW_OK;
  }
}
Example #28
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return nullptr;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return nullptr;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return texture;
        }
    }
#endif

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return nullptr;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);

    return texture;
}
Example #29
static FrameData *
gst_x265_enc_queue_frame (GstX265Enc * enc, GstVideoCodecFrame * frame,
    GstVideoInfo * info)
{
  GstVideoFrame vframe;
  FrameData *fdata;

  if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ))
    return NULL;

  fdata = g_slice_new (FrameData);
  fdata->frame = gst_video_codec_frame_ref (frame);
  fdata->vframe = vframe;

  enc->pending_frames = g_list_prepend (enc->pending_frames, fdata);

  return fdata;
}
Example #30
static CoglBool
cogl_gst_yv12_upload (CoglGstVideoSink *sink,
                      GstBuffer *buffer)
{
  CoglGstVideoSinkPrivate *priv = sink->priv;
  GstVideoFrame frame;
  CoglPixelFormat format = COGL_PIXEL_FORMAT_A_8;

  if (!gst_video_frame_map (&frame, &priv->info, buffer, GST_MAP_READ))
    goto map_fail;

  clear_frame_textures (sink);

  priv->frame[0] =
    video_texture_new_from_data (priv->ctx,
                                 GST_VIDEO_INFO_COMP_WIDTH (&priv->info, 0),
                                 GST_VIDEO_INFO_COMP_HEIGHT (&priv->info, 0),
                                 format, format,
                                 priv->info.stride[0], frame.data[0], NULL);

  priv->frame[1] =
    video_texture_new_from_data (priv->ctx,
                                 GST_VIDEO_INFO_COMP_WIDTH (&priv->info, 1),
                                 GST_VIDEO_INFO_COMP_HEIGHT (&priv->info, 1),
                                 format, format,
                                 priv->info.stride[1], frame.data[1], NULL);

  priv->frame[2] =
    video_texture_new_from_data (priv->ctx,
                                 GST_VIDEO_INFO_COMP_WIDTH (&priv->info, 2),
                                 GST_VIDEO_INFO_COMP_HEIGHT (&priv->info, 2),
                                 format, format,
                                 priv->info.stride[2], frame.data[2], NULL);

  gst_video_frame_unmap (&frame);

  return TRUE;

map_fail:
  {
    GST_ERROR_OBJECT (sink, "Could not map incoming video frame");
    return FALSE;
  }
}