Example No. 1
static gboolean
gst_wl_shm_validate_video_info (const GstVideoInfo * vinfo)
{
  gint height = GST_VIDEO_INFO_HEIGHT (vinfo);
  gint base_stride = GST_VIDEO_INFO_PLANE_STRIDE (vinfo, 0);
  gsize base_offs = GST_VIDEO_INFO_PLANE_OFFSET (vinfo, 0);
  gint i;
  gsize offs = 0;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vinfo); i++) {
    guint32 estride;

    /* Check the plane stride against the stride extrapolated from the
     * first plane's stride (the pitch calculated by the KMS driver). */
    estride = gst_wl_shm_extrapolate_stride (vinfo->finfo, i, base_stride);

    if (estride != GST_VIDEO_INFO_PLANE_STRIDE (vinfo, i))
      return FALSE;

    if (GST_VIDEO_INFO_PLANE_OFFSET (vinfo, i) - base_offs != offs)
      return FALSE;

    /* Note that we cannot negotiate special padding between planes,
     * hence the display height is used here. */
    offs +=
        estride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vinfo->finfo, i, height);
  }

  if (vinfo->size < offs)
    return FALSE;

  return TRUE;
}
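The check above treats the planes as packed back to back, each plane covering its stride times the sub-sampled plane height. As a rough, self-contained sketch of that arithmetic (not taken from the example; the format and dimensions are made up), the same per-plane row count can be read from a plain GstVideoInfo:

#include <gst/video/video.h>

/* Illustrative only: print stride, sub-sampled row count and resulting size
 * for every plane of an I420 frame. */
static void
print_plane_sizes (void)
{
  GstVideoInfo info;
  guint i;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 320, 240);

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++) {
    gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
    gint rows = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info.finfo, i,
        GST_VIDEO_INFO_HEIGHT (&info));

    g_print ("plane %u: stride %d, rows %d, size %d\n",
        i, stride, rows, stride * rows);
  }
}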
Example No. 2
void
yadif_filter (GstYadif * yadif, int parity, int tff)
{
  int y, i;
  const GstVideoInfo *vi = &yadif->video_info;
  const GstVideoFormatInfo *vfi = vi->finfo;

  for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (vfi); i++) {
    int w = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vfi, i, vi->width);
    int h = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vfi, i, vi->height);
    int refs = GST_VIDEO_INFO_COMP_STRIDE (vi, i);
    int df = GST_VIDEO_INFO_COMP_PSTRIDE (vi, i);
    guint8 *prev_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->prev_frame, i);
    guint8 *cur_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->cur_frame, i);
    guint8 *next_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->next_frame, i);
    guint8 *dest_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->dest_frame, i);

    for (y = 0; y < h; y++) {
      if ((y ^ parity) & 1) {
        guint8 *prev = prev_data + y * refs;
        guint8 *cur = cur_data + y * refs;
        guint8 *next = next_data + y * refs;
        guint8 *dst = dest_data + y * refs;
        int mode = ((y == 1) || (y + 2 == h)) ? 2 : yadif->mode;
#if HAVE_CPU_X86_64
        /* The C implementation below is kept for reference but disabled by
         * the constant condition, so the x86-64 version is always used. */
        if (0) {
          filter_line_c (dst, prev, cur, next, w,
              y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
        } else {
          filter_line_x86_64 (dst, prev, cur, next, w,
              y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
        }
#else
        filter_line_c (dst, prev, cur, next, w,
            y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
#endif
      } else {
        guint8 *dst = dest_data + y * refs;
        guint8 *cur = cur_data + y * refs;

        memcpy (dst, cur, w * df);
      }
    }
  }

#if 0
  emms_c ();
#endif
}
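The row addressing used above (base pointer plus y times the component stride, with a row length of width times the pixel stride) comes straight from the mapped GstVideoFrame. A minimal sketch, independent of this element and assuming the frame is already mapped for writing:

#include <string.h>
#include <gst/video/video.h>

/* Illustrative only: blank every other line of the first component of a
 * frame that has already been mapped with GST_MAP_WRITE. */
static void
blank_odd_lines (GstVideoFrame * frame)
{
  guint8 *data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  gint stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  gint width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  gint height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  gint pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  gint y;

  for (y = 1; y < height; y += 2)
    memset (data + y * stride, 0, width * pstride);
}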
Example No. 3
/**
 * gst_gl_get_plane_data_size:
 * @info: a #GstVideoInfo
 * @align: a #GstVideoAlignment or %NULL
 * @plane: plane number in @info to retrieve the data size of
 *
 * Retrieve the size in bytes of a video plane with a certain alignment
 */
gsize
gst_gl_get_plane_data_size (GstVideoInfo * info, GstVideoAlignment * align,
    guint plane)
{
  gint padded_height;
  gsize plane_size;

  padded_height = info->height;

  if (align)
    padded_height += align->padding_top + align->padding_bottom;

  padded_height =
      GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo, plane, padded_height);

  plane_size = GST_VIDEO_INFO_PLANE_STRIDE (info, plane) * padded_height;

  return plane_size;
}
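A hedged usage sketch for the helper above (the format, dimensions and padding value are illustrative): summing the per-plane sizes gives the total buffer size for a frame with extra rows of padding at the bottom.

#include <gst/video/video.h>

/* Illustrative only: total size of an I420 frame with 8 extra padding rows
 * at the bottom, computed with the helper above. */
static gsize
total_padded_size (void)
{
  GstVideoInfo info;
  GstVideoAlignment align;
  gsize size = 0;
  guint plane;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 320, 240);

  gst_video_alignment_reset (&align);
  align.padding_bottom = 8;

  for (plane = 0; plane < GST_VIDEO_INFO_N_PLANES (&info); plane++)
    size += gst_gl_get_plane_data_size (&info, &align, plane);

  return size;
}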
Example No. 4
/* Allocate buffer and copy image data into Y444 format */
static GstFlowReturn
theora_handle_image (GstTheoraDec * dec, th_ycbcr_buffer buf,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
  gint width, height, stride;
  GstFlowReturn result;
  gint i, comp;
  guint8 *dest, *src;
  GstVideoFrame vframe;
  gint pic_width, pic_height;
  gint offset_x, offset_y;

  result = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (G_UNLIKELY (result != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
        gst_flow_get_name (result));
    return result;
  }

  if (!dec->can_crop) {
    /* we need to crop the hard way */
    offset_x = dec->info.pic_x;
    offset_y = dec->info.pic_y;
    pic_width = dec->info.pic_width;
    pic_height = dec->info.pic_height;
    /* Ensure correct offsets in chroma for formats that need it
     * by rounding the offset. libtheora will add proper pixels,
     * so no need to handle them ourselves. */
    if (offset_x & 1 && dec->info.pixel_fmt != TH_PF_444)
      offset_x--;
    if (offset_y & 1 && dec->info.pixel_fmt == TH_PF_420)
      offset_y--;
  } else {
    /* copy the whole frame */
    offset_x = 0;
    offset_y = 0;
    pic_width = dec->info.frame_width;
    pic_height = dec->info.frame_height;

    if (dec->info.pic_width != dec->info.frame_width ||
        dec->info.pic_height != dec->info.frame_height ||
        dec->info.pic_x != 0 || dec->info.pic_y != 0) {
      GstVideoMeta *vmeta;
      GstVideoCropMeta *cmeta;

      vmeta = gst_buffer_get_video_meta (frame->output_buffer);
      /* If the buffer pool didn't add the meta already
       * we add it ourselves here */
      if (!vmeta)
        vmeta = gst_buffer_add_video_meta (frame->output_buffer,
            GST_VIDEO_FRAME_FLAG_NONE,
            dec->output_state->info.finfo->format,
            dec->info.frame_width, dec->info.frame_height);

      /* Just to be sure that the buffer pool doesn't do something
       * completely weird that would make us crash later
       */
      g_assert (vmeta->format == dec->output_state->info.finfo->format);
      g_assert (vmeta->width == dec->info.frame_width);
      g_assert (vmeta->height == dec->info.frame_height);

      cmeta = gst_buffer_add_video_crop_meta (frame->output_buffer);

      /* we can do things slightly more efficiently when we know that
       * downstream understands clipping */
      cmeta->x = dec->info.pic_x;
      cmeta->y = dec->info.pic_y;
      cmeta->width = dec->info.pic_width;
      cmeta->height = dec->info.pic_height;
    }
  }

  /* if only libtheora would allow us to give it a destination frame */
  GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
      "doing unavoidable video frame copy");

  if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
              frame->output_buffer, GST_MAP_WRITE)))
    goto invalid_frame;

  for (comp = 0; comp < 3; comp++) {
    width =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vframe.info.finfo, comp, pic_width);
    height =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vframe.info.finfo, comp,
        pic_height);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
    dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);

    src = buf[comp].data;
    src += ((height == pic_height) ? offset_y : offset_y / 2)
        * buf[comp].stride;
    src += (width == pic_width) ? offset_x : offset_x / 2;

    for (i = 0; i < height; i++) {
      memcpy (dest, src, width);

      dest += stride;
      src += buf[comp].stride;
    }
  }
  gst_video_frame_unmap (&vframe);

  return GST_FLOW_OK;
invalid_frame:
  {
    GST_DEBUG_OBJECT (dec, "could not map video frame");
    return GST_FLOW_ERROR;
  }
}
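The copy loop above scales the crop rectangle per component with GST_VIDEO_FORMAT_INFO_SCALE_WIDTH/HEIGHT. A small, standalone illustration of that scaling for a 4:2:0 format (the format and numbers are made up):

#include <gst/video/video.h>

/* Illustrative only: how a crop rectangle scales per component for I420,
 * where the chroma components are sub-sampled by 2 in both directions. */
static void
print_cropped_component_sizes (gint crop_width, gint crop_height)
{
  GstVideoInfo info;
  guint comp;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 640, 480);

  for (comp = 0; comp < GST_VIDEO_INFO_N_COMPONENTS (&info); comp++) {
    gint w = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info.finfo, comp, crop_width);
    gint h = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info.finfo, comp, crop_height);

    g_print ("component %u: %dx%d\n", comp, w, h);
  }
}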
Example No. 5
/**
 * gst_video_info_align:
 * @info: a #GstVideoInfo
 * @align: alignment parameters
 *
 * Adjust the offset and stride fields in @info so that the padding and
 * stride alignment in @align is respected.
 *
 * Extra padding will be added to the right side when stride alignment padding
 * is required and @align will be updated with the new padding values.
 */
void
gst_video_info_align (GstVideoInfo * info, GstVideoAlignment * align)
{
  const GstVideoFormatInfo *vinfo = info->finfo;
  gint width, height;
  gint padded_width, padded_height;
  gint i, n_planes;
  gboolean aligned;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);

  GST_LOG ("padding %u-%ux%u-%u", align->padding_top,
      align->padding_left, align->padding_right, align->padding_bottom);

  n_planes = GST_VIDEO_INFO_N_PLANES (info);

  if (GST_VIDEO_FORMAT_INFO_HAS_PALETTE (vinfo))
    n_planes--;

  /* first make sure the left padding does not cause alignment problems later */
  do {
    GST_LOG ("left padding %u", align->padding_left);
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      gint hedge;

      /* this is the number of pixels to add as left padding */
      hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, i, align->padding_left);
      hedge *= GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, i);

      GST_LOG ("plane %d, padding %d, alignment %u", i, hedge,
          align->stride_align[i]);
      aligned &= (hedge & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned padding, increasing padding");
    /* increase padded_width */
    align->padding_left += align->padding_left & ~(align->padding_left - 1);
  } while (!aligned);

  /* add the padding */
  padded_width = width + align->padding_left + align->padding_right;
  padded_height = height + align->padding_top + align->padding_bottom;

  do {
    GST_LOG ("padded dimension %u-%u", padded_width, padded_height);

    info->width = padded_width;
    info->height = padded_height;
    fill_planes (info);

    /* check alignment */
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      GST_LOG ("plane %d, stride %d, alignment %u", i, info->stride[i],
          align->stride_align[i]);
      aligned &= (info->stride[i] & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned strides, increasing dimension");
    /* increase padded_width */
    padded_width += padded_width & ~(padded_width - 1);
  } while (!aligned);

  align->padding_right = padded_width - width - align->padding_left;

  info->width = width;
  info->height = height;

  for (i = 0; i < n_planes; i++) {
    gint vedge, hedge, comp;

    /* Find the component for this plane. FIXME: we assume the plane number
     * and component number are the same for now. For scaling the dimensions
     * this is currently true for all formats, but it might not be when adding
     * new formats. We might need to add a plane subsampling to the format
     * info to make this more generic, or maybe use a plane -> component
     * mapping. */
    comp = i;

    hedge =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, comp, align->padding_left);
    vedge =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vinfo, comp, align->padding_top);

    GST_DEBUG ("plane %d: comp: %d, hedge %d vedge %d align %d stride %d", i,
        comp, hedge, vedge, align->stride_align[i], info->stride[i]);

    info->offset[i] += (vedge * info->stride[i]) +
        (hedge * GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, comp));
  }
}
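A minimal usage sketch for this function (the NV12 format, the 16 bottom padding rows and the 64-byte stride alignment are made-up values; note that stride_align is applied as a bit mask in the code above, so alignment minus one is stored):

#include <gst/video/video.h>

/* Illustrative only: request 64-byte-aligned strides and 16 padding rows
 * at the bottom of an NV12 frame, then inspect the adjusted layout. */
static void
align_nv12_info (void)
{
  GstVideoInfo info;
  GstVideoAlignment align;
  guint i;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_NV12, 321, 241);

  gst_video_alignment_reset (&align);
  align.padding_bottom = 16;
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    align.stride_align[i] = 64 - 1;     /* stride_align is a mask */

  gst_video_info_align (&info, &align);

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++)
    g_print ("plane %u: stride %d, offset %" G_GSIZE_FORMAT "\n", i,
        GST_VIDEO_INFO_PLANE_STRIDE (&info, i),
        GST_VIDEO_INFO_PLANE_OFFSET (&info, i));
}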
Example No. 6
static void
gst_raw_video_parse_update_info (GstRawVideoParseConfig * config)
{
  guint i;
  guint n_planes;
  guint last_plane;
  gsize last_plane_offset, last_plane_size;
  GstVideoInfo *info = &(config->info);

  GST_DEBUG ("updating info with width %u height %u format %s "
      "custom plane strides&offsets %d", config->width, config->height,
      gst_video_format_to_string (config->format),
      config->custom_plane_strides);

  gst_video_info_set_format (info, config->format, config->width,
      config->height);

  GST_VIDEO_INFO_PAR_N (info) = config->pixel_aspect_ratio_n;
  GST_VIDEO_INFO_PAR_D (info) = config->pixel_aspect_ratio_d;
  GST_VIDEO_INFO_FPS_N (info) = config->framerate_n;
  GST_VIDEO_INFO_FPS_D (info) = config->framerate_d;
  GST_VIDEO_INFO_INTERLACE_MODE (info) =
      config->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
      GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* Check if there are custom plane strides & offsets that need to be preserved */
  if (config->custom_plane_strides) {
    /* In case there are, overwrite the offsets&strides computed by
     * gst_video_info_set_format with the custom ones */
    for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
      GST_VIDEO_INFO_PLANE_OFFSET (info, i) = config->plane_offsets[i];
      GST_VIDEO_INFO_PLANE_STRIDE (info, i) = config->plane_strides[i];
    }
  } else {
    /* No custom strides & offsets; copy the computed ones into
     * the plane_offsets & plane_strides arrays to ensure they
     * are equal to the ones in the video info */
    for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
      config->plane_offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (info, i);
      config->plane_strides[i] = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
    }
  }

  n_planes = GST_VIDEO_INFO_N_PLANES (info);
  if (n_planes < 1)
    n_planes = 1;

  /* Figure out what plane is the physically last one. Typically
   * this is the last plane in the list (= at index n_planes-1).
   * However, this is not guaranteed, so we have to scan the offsets
   * to find the last plane. */
  last_plane_offset = 0;
  last_plane = 0;
  for (i = 0; i < n_planes; ++i) {
    gsize plane_offset = GST_VIDEO_INFO_PLANE_OFFSET (info, i);
    if (plane_offset >= last_plane_offset) {
      last_plane = i;
      last_plane_offset = plane_offset;
    }
  }

  last_plane_size =
      GST_VIDEO_INFO_PLANE_STRIDE (info,
      last_plane) * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo,
      last_plane, config->height);

  GST_VIDEO_INFO_SIZE (info) = last_plane_offset + last_plane_size;

  GST_DEBUG ("last plane #%u:  offset: %" G_GSIZE_FORMAT " size: %"
      G_GSIZE_FORMAT " => frame size minus extra padding: %" G_GSIZE_FORMAT,
      last_plane, last_plane_offset, last_plane_size,
      GST_VIDEO_INFO_SIZE (info));
}
Example No. 7
static GstFlowReturn
gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
  GstBuffer *newbuf;
  GstV4l2Meta *meta;
  GstV4l2Object *obj;
  GstVideoInfo *info;
  guint index;

  obj = pool->obj;
  info = &obj->info;

  switch (obj->mode) {
    case GST_V4L2_IO_RW:
    {
      newbuf =
          gst_buffer_new_allocate (pool->allocator, pool->size, &pool->params);
      break;
    }
    case GST_V4L2_IO_MMAP:
    {
      newbuf = gst_buffer_new ();
      meta = GST_V4L2_META_ADD (newbuf);

      index = pool->num_allocated;

      GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);

      meta->vbuffer.index = index;
      meta->vbuffer.type = obj->type;
      meta->vbuffer.memory = V4L2_MEMORY_MMAP;

      if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
        goto querybuf_failed;

      GST_LOG_OBJECT (pool, "  index:     %u", meta->vbuffer.index);
      GST_LOG_OBJECT (pool, "  type:      %d", meta->vbuffer.type);
      GST_LOG_OBJECT (pool, "  bytesused: %u", meta->vbuffer.bytesused);
      GST_LOG_OBJECT (pool, "  flags:     %08x", meta->vbuffer.flags);
      GST_LOG_OBJECT (pool, "  field:     %d", meta->vbuffer.field);
      GST_LOG_OBJECT (pool, "  memory:    %d", meta->vbuffer.memory);
      if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
        GST_LOG_OBJECT (pool, "  MMAP offset:  %u", meta->vbuffer.m.offset);
      GST_LOG_OBJECT (pool, "  length:    %u", meta->vbuffer.length);
      GST_LOG_OBJECT (pool, "  input:     %u", meta->vbuffer.input);

      meta->mem = v4l2_mmap (0, meta->vbuffer.length,
          PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
          meta->vbuffer.m.offset);
      if (meta->mem == MAP_FAILED)
        goto mmap_failed;

      gst_buffer_append_memory (newbuf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              meta->mem, meta->vbuffer.length, 0, meta->vbuffer.length, NULL,
              NULL));

      /* add metadata to raw video buffers */
      if (pool->add_videometa && info->finfo) {
        const GstVideoFormatInfo *finfo = info->finfo;
        gsize offset[GST_VIDEO_MAX_PLANES];
        gint width, height, n_planes, offs, i, stride[GST_VIDEO_MAX_PLANES];

        width = GST_VIDEO_INFO_WIDTH (info);
        height = GST_VIDEO_INFO_HEIGHT (info);
        n_planes = GST_VIDEO_INFO_N_PLANES (info);

        GST_DEBUG_OBJECT (pool, "adding video meta, bytesperline %d",
            obj->bytesperline);

        offs = 0;
        for (i = 0; i < n_planes; i++) {
          offset[i] = offs;
          stride[i] =
              GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, obj->bytesperline);

          offs +=
              stride[i] * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, height);
        }
        gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
            GST_VIDEO_INFO_FORMAT (info), width, height, n_planes,
            offset, stride);
      }
      break;
    }
    case GST_V4L2_IO_USERPTR:
    default:
      newbuf = NULL;
      g_assert_not_reached ();
  }

  pool->num_allocated++;

  *buffer = newbuf;

  return GST_FLOW_OK;

  /* ERRORS */
querybuf_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
mmap_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
}
Example No. 8
static bool gst_vlc_video_info_from_vout( GstVideoInfo *p_info,
        GstVideoAlignment *p_align, GstCaps *p_caps, decoder_t *p_dec,
        picture_t *p_pic_info )
{
    const GstVideoFormatInfo *p_vinfo = p_info->finfo;
    picture_t *p_pic = NULL;
    int i;

    /* Ensure the queue is empty */
    gst_vlc_dec_ensure_empty_queue( p_dec );
    gst_video_info_align( p_info, p_align );

    if( !gst_vlc_set_vout_fmt( p_info, p_align, p_caps, p_dec ))
    {
        msg_Err( p_dec, "failed to set output format to vout" );
        return false;
    }

    /* Acquire a picture and release it. This is done to get the picture
     * stride/offset info, so that a GStreamer decoder wanting zero-copy
     * can use the downstream bufferpool directly. */
    if( !decoder_UpdateVideoFormat( p_dec ) )
        p_pic = decoder_NewPicture( p_dec );
    if( !p_pic )
    {
        msg_Err( p_dec, "failed to acquire picture from vout; for pic info" );
        return false;
    }

    /* reject if strides don't match */
    for( i = 0; i < p_pic->i_planes; i++ )
        if( p_info->stride[i] != p_pic->p[i].i_pitch )
            goto strides_mismatch;

    p_info->offset[0] = 0;
    for( i = 1; i < p_pic->i_planes; i++ )
    {
        p_info->offset[i] = p_info->offset[i-1] +
            p_pic->p[i-1].i_pitch * p_pic->p[i-1].i_lines;
    }
    GST_VIDEO_INFO_SIZE( p_info ) = p_info->offset[i-1] +
        p_pic->p[i-1].i_pitch * p_pic->p[i-1].i_lines;

    for( i = 0; i < p_pic->i_planes; i++ )
    {
        int i_v_edge, i_h_edge;

        i_h_edge =
            GST_VIDEO_FORMAT_INFO_SCALE_WIDTH( p_vinfo, i,
                    p_align->padding_left);
        i_v_edge =
            GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT( p_vinfo, i,
                    p_align->padding_top);

        p_info->offset[i] += ( i_v_edge * p_info->stride[i] ) +
            ( i_h_edge * GST_VIDEO_FORMAT_INFO_PSTRIDE( p_vinfo, i ));
    }

    memcpy( p_pic_info, p_pic, sizeof( picture_t ));
    picture_Release( p_pic );

    return true;

strides_mismatch:
    msg_Err( p_dec, "strides mismatch" );
    picture_Release( p_pic );
    return false;
}