Example #1
static GstFlowReturn
gst_msdkdec_finish_task (GstMsdkDec * thiz, MsdkDecTask * task)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (thiz);
  GstFlowReturn flow;
  GstVideoCodecFrame *frame;
  MsdkSurface *surface;
  mfxStatus status;
  GList *l;

  if (G_LIKELY (task->sync_point)) {
    status =
        MFXVideoCORE_SyncOperation (gst_msdk_context_get_session
        (thiz->context), task->sync_point, 300000);
    if (status != MFX_ERR_NONE) {
      GST_ERROR_OBJECT (thiz, "failed to do sync operation");
      return GST_FLOW_ERROR;
    }
  }

  if (G_LIKELY (task->sync_point || (task->surface && task->decode_only))) {
    gboolean decode_only = task->decode_only;

    frame = gst_msdkdec_get_oldest_frame (decoder);

    l = g_list_find_custom (thiz->decoded_msdk_surfaces, task->surface,
        _find_msdk_surface);
    if (l) {
      surface = l->data;
    } else {
      GST_ERROR_OBJECT (thiz, "Couldn't find the cached MSDK surface");
      return GST_FLOW_ERROR;
    }

    if (G_LIKELY (frame)) {
      if (G_LIKELY (surface->copy.buffer == NULL)) {
        frame->output_buffer = gst_buffer_ref (surface->buf);
      } else {
        gst_video_frame_copy (&surface->copy, &surface->data);
        frame->output_buffer = gst_buffer_ref (surface->copy.buffer);
      }
    }

    free_surface (thiz, surface);
    task->sync_point = NULL;
    task->surface = NULL;
    task->decode_only = FALSE;

    if (!frame)
      return GST_FLOW_FLUSHING;
    /* Drop only the extra ref taken by gst_msdkdec_get_oldest_frame ();
     * the decoder still holds its own reference, which
     * gst_video_decoder_finish_frame () below consumes. */
    gst_video_codec_frame_unref (frame);

    if (decode_only)
      GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
    flow = gst_video_decoder_finish_frame (decoder, frame);
    return flow;
  }
  return GST_FLOW_OK;
}
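
gst_video_frame_copy () expects both frames to be mapped already; in Example #1 the mapping happened earlier, when the surface's copy frames were set up. A minimal stand-alone sketch of the map/copy/unmap pattern the remaining examples keep repeating (the helper name and arguments are illustrative, not taken from any of the sources; requires <gst/video/video.h>):

static gboolean
copy_video_buffer (GstVideoInfo * info, GstBuffer * src, GstBuffer * dest)
{
  GstVideoFrame src_frame, dest_frame;
  gboolean ret;

  /* map the source read-only and the destination writable */
  if (!gst_video_frame_map (&src_frame, info, src, GST_MAP_READ))
    return FALSE;

  if (!gst_video_frame_map (&dest_frame, info, dest, GST_MAP_WRITE)) {
    gst_video_frame_unmap (&src_frame);
    return FALSE;
  }

  /* the copy honours each side's strides and plane offsets */
  ret = gst_video_frame_copy (&dest_frame, &src_frame);

  gst_video_frame_unmap (&dest_frame);
  gst_video_frame_unmap (&src_frame);

  return ret;
}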
Example #2
static GstFlowReturn
gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstVideoCodecFrame * in_frame,
    GstVideoFrame * input_vframe)
{
  GstVideoCodecState *state;
  GstVideoInfo *info;
  GstVideoInfo *dinfo;
  GstVideoFrame output_frame;
  GstFlowReturn ret;
  GstBuffer *buffer = NULL;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  info = &state->info;
  dinfo = &dec->decoded_info;

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
      "Copying input buffer %ux%u (%" G_GSIZE_FORMAT ") to output buffer "
      "%ux%u (%" G_GSIZE_FORMAT ")", dinfo->width, dinfo->height,
      dinfo->size, info->width, info->height, info->size);

  ret = gst_buffer_pool_acquire_buffer (dec->downstream_pool, &buffer, NULL);
  if (ret != GST_FLOW_OK)
    goto beach;

  if (!gst_video_frame_map (&output_frame, info, buffer, GST_MAP_WRITE))
    goto map_fail;

  if (in_frame->output_buffer)
    gst_buffer_unref (in_frame->output_buffer);
  in_frame->output_buffer = buffer;

  if (!gst_video_frame_copy (&output_frame, input_vframe))
    goto copy_failed;

  gst_video_frame_unmap (&output_frame);

  GST_BUFFER_FLAGS (in_frame->output_buffer) =
      GST_BUFFER_FLAGS (input_vframe->buffer);

beach:
  gst_video_codec_state_unref (state);

  return ret;

map_fail:
  {
    GST_ERROR_OBJECT (dec, "Failed to map output frame");
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }

copy_failed:
  {
    GST_ERROR_OBJECT (dec, "Failed to copy output frame");
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }
}
Example #3
static GstFlowReturn
gst_v4l2_buffer_pool_copy_buffer (GstV4l2BufferPool * pool, GstBuffer * dest,
    GstBuffer * src)
{
  const GstVideoFormatInfo *finfo = pool->caps_info.finfo;

  GST_LOG_OBJECT (pool, "copying buffer");

  if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN &&
          finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
    GstVideoFrame src_frame, dest_frame;

    GST_DEBUG_OBJECT (pool, "copy video frame");

    /* we have raw video, use videoframe copy to get strides right */
    if (!gst_video_frame_map (&src_frame, &pool->caps_info, src, GST_MAP_READ))
      goto invalid_buffer;

    if (!gst_video_frame_map (&dest_frame, &pool->caps_info, dest,
            GST_MAP_WRITE)) {
      gst_video_frame_unmap (&src_frame);
      goto invalid_buffer;
    }

    gst_video_frame_copy (&dest_frame, &src_frame);

    gst_video_frame_unmap (&src_frame);
    gst_video_frame_unmap (&dest_frame);
  } else {
    GstMapInfo map;

    GST_DEBUG_OBJECT (pool, "copy raw bytes");

    if (!gst_buffer_map (src, &map, GST_MAP_READ))
      goto invalid_buffer;

    gst_buffer_fill (dest, 0, map.data, gst_buffer_get_size (src));

    gst_buffer_unmap (src, &map);
    gst_buffer_resize (dest, 0, gst_buffer_get_size (src));
  }

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, pool, "slow copy into buffer %p",
      dest);

  return GST_FLOW_OK;

invalid_buffer:
  {
    GST_ERROR_OBJECT (pool, "could not map buffer");
    return GST_FLOW_ERROR;
  }
}
Example #4
static GstFlowReturn
gst_gaussianblur_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstGaussianBlur *filter = GST_GAUSSIANBLUR (vfilter);
  GstClockTime timestamp;
  gint64 stream_time;
  gfloat sigma;
  guint8 *src, *dest;

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  GST_OBJECT_LOCK (filter);
  sigma = filter->sigma;
  GST_OBJECT_UNLOCK (filter);

  if (filter->cur_sigma != sigma) {
    g_free (filter->kernel);
    filter->kernel = NULL;
    g_free (filter->kernel_sum);
    filter->kernel_sum = NULL;
    filter->cur_sigma = sigma;
  }
  if (filter->kernel == NULL &&
      !make_gaussian_kernel (filter, filter->cur_sigma)) {
    GST_ELEMENT_ERROR (filter, RESOURCE, NO_SPACE_LEFT, ("Out of memory"),
        ("Failed to allocate gaussian kernel"));
    return GST_FLOW_ERROR;
  }

  /*
   * Perform gaussian smoothing on the image using the input standard
   * deviation.
   */
  src = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_COMP_DATA (out_frame, 0);
  gst_video_frame_copy (out_frame, in_frame);
  gaussian_smooth (filter, src, dest);

  return GST_FLOW_OK;
}
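
The gst_object_sync_values () call in Example #4 only has an effect if the application has attached a control source to the property. A hedged application-side sketch (it assumes the element exposes a controllable "sigma" property, as the filter code above suggests):

#include <gst/controller/gstinterpolationcontrolsource.h>
#include <gst/controller/gstdirectcontrolbinding.h>

static void
animate_sigma (GstElement * blur)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();

  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (blur),
      gst_direct_control_binding_new (GST_OBJECT (blur), "sigma", cs));

  /* ramp sigma over the first 5 seconds; the direct control binding maps
   * values from [0,1] onto the property's own range */
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0 * GST_SECOND, 0.1);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      5 * GST_SECOND, 1.0);

  gst_object_unref (cs);
}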
Example #5
static GstBuffer *
gst_core_media_buffer_new_from_buffer (GstBuffer * buf, GstVideoInfo * info)
{
  gboolean ret;
  GstBuffer *copy_buf;
  GstVideoFrame dest, src;
  GstAllocator *allocator;

  allocator = gst_allocator_find (GST_ALLOCATOR_SYSMEM);
  if (!allocator) {
    GST_ERROR ("Could not find SYSMEM allocator");
    return NULL;
  }

  copy_buf = gst_buffer_new_allocate (allocator, info->size, NULL);

  gst_object_unref (allocator);

  if (G_UNLIKELY (copy_buf == NULL)) {
    GST_ERROR ("Could not allocate copy buffer");
    return NULL;
  }

  if (!gst_video_frame_map (&dest, info, copy_buf, GST_MAP_WRITE)) {
    GST_ERROR ("Could not map destination frame");
    goto error;
  }

  if (!gst_video_frame_map (&src, info, buf, GST_MAP_READ)) {
    GST_ERROR ("Could not map source frame");
    gst_video_frame_unmap (&dest);
    goto error;
  }

  ret = gst_video_frame_copy (&dest, &src);

  gst_video_frame_unmap (&dest);
  gst_video_frame_unmap (&src);

  if (!ret) {
    GST_ERROR ("Could not copy frame");
    goto error;
  }

  return copy_buf;

error:
  if (copy_buf) {
    gst_buffer_unref (copy_buf);
  }
  return NULL;
}
Example #6
static GstFlowReturn
gst_smooth_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
    GstVideoFrame * out_frame)
{
  GstSmooth *smooth;

  smooth = GST_SMOOTH (vfilter);

  if (!smooth->active) {
    gst_video_frame_copy (out_frame, in_frame);
    return GST_FLOW_OK;
  }

  smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 0),
      GST_VIDEO_FRAME_COMP_DATA (in_frame, 0),
      GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 0),
      GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 0),
      GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0),
      GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 0),
      smooth->tolerance, smooth->filtersize);
  if (!smooth->luma_only) {
    smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 1),
        GST_VIDEO_FRAME_COMP_DATA (in_frame, 1),
        GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 1),
        GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 1),
        GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1),
        GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 1),
        smooth->tolerance, smooth->filtersize);
    smooth_filter (GST_VIDEO_FRAME_COMP_DATA (out_frame, 2),
        GST_VIDEO_FRAME_COMP_DATA (in_frame, 2),
        GST_VIDEO_FRAME_COMP_WIDTH (in_frame, 2),
        GST_VIDEO_FRAME_COMP_HEIGHT (in_frame, 2),
        GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 2),
        GST_VIDEO_FRAME_COMP_STRIDE (out_frame, 2),
        smooth->tolerance, smooth->filtersize);
  } else {
    gst_video_frame_copy_plane (out_frame, in_frame, 1);
    gst_video_frame_copy_plane (out_frame, in_frame, 2);
  }

  return GST_FLOW_OK;
}
Example #7
static MsdkSurface *
get_msdk_surface_from_input_buffer (GstMsdkVPP * thiz, GstBuffer * inbuf)
{
  GstVideoFrame src_frame, out_frame;
  MsdkSurface *msdk_surface;
  GstMemory *mem = NULL;

  if (gst_msdk_is_msdk_buffer (inbuf)) {
    msdk_surface = g_slice_new0 (MsdkSurface);
    msdk_surface->surface = gst_msdk_get_surface_from_buffer (inbuf);
    msdk_surface->buf = gst_buffer_ref (inbuf);
    return msdk_surface;
  }

  /* If upstream hasn't accepted the proposed msdk bufferpool,
   * just copy the frame (if not dmabuf backed) to an msdk buffer and
   * take a surface from it. */
  if (!(msdk_surface =
          get_surface_from_pool (thiz, thiz->sinkpad_buffer_pool, NULL)))
    goto error;

#ifndef _WIN32
  /************ dmabuf-import ************* */
  /* if upstream provided dmabuf-backed memory, but not an msdk
   * buffer, we can export the dmabuf to the underlying VA surface */
  mem = gst_buffer_peek_memory (inbuf, 0);
  if (gst_is_dmabuf_memory (mem)) {
    if (import_dmabuf_to_msdk_surface (thiz, inbuf, msdk_surface))
      return msdk_surface;
    else
      GST_INFO_OBJECT (thiz, "Upstream dmabuf-backed memory is not imported "
          "to the msdk surface, falling back to copying the input frame");
  }
#endif

  if (!gst_video_frame_map (&src_frame, &thiz->sinkpad_info, inbuf,
          GST_MAP_READ)) {
    GST_ERROR_OBJECT (thiz, "failed to map the frame for source");
    goto error;
  }

  if (!gst_video_frame_map (&out_frame, &thiz->sinkpad_buffer_pool_info,
          msdk_surface->buf, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (thiz, "failed to map the frame for destination");
    gst_video_frame_unmap (&src_frame);
    goto error;
  }

  if (!gst_video_frame_copy (&out_frame, &src_frame)) {
    GST_ERROR_OBJECT (thiz, "failed to copy frame");
    gst_video_frame_unmap (&out_frame);
    gst_video_frame_unmap (&src_frame);
    goto error;
  }

  gst_video_frame_unmap (&out_frame);
  gst_video_frame_unmap (&src_frame);

  return msdk_surface;

error:
  return NULL;
}
Example #8
/**
 * gst_vaapi_plugin_base_get_input_buffer:
 * @plugin: a #GstVaapiPluginBase
 * @inbuf: the sink pad (input) buffer
 * @outbuf_ptr: pointer to the location that receives the VA surface backed buffer
 *
 * Acquires the sink pad (input) buffer as a VA surface backed
 * buffer. This is mostly useful for raw YUV buffers, as source
 * buffers that are already backed as a VA surface are passed
 * verbatim.
 *
 * Returns: #GST_FLOW_OK if the buffer could be acquired
 */
GstFlowReturn
gst_vaapi_plugin_base_get_input_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * inbuf, GstBuffer ** outbuf_ptr)
{
  GstVaapiVideoMeta *meta;
  GstBuffer *outbuf;
  GstVideoFrame src_frame, out_frame;
  gboolean success;

  g_return_val_if_fail (inbuf != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (outbuf_ptr != NULL, GST_FLOW_ERROR);

  meta = gst_buffer_get_vaapi_video_meta (inbuf);
  if (meta) {
    *outbuf_ptr = gst_buffer_ref (inbuf);
    return GST_FLOW_OK;
  }

  if (!plugin->sinkpad_caps_is_raw)
    goto error_invalid_buffer;

  if (!plugin->sinkpad_buffer_pool)
    goto error_no_pool;

  if (!gst_buffer_pool_set_active (plugin->sinkpad_buffer_pool, TRUE))
    goto error_active_pool;

  outbuf = NULL;
  if (gst_buffer_pool_acquire_buffer (plugin->sinkpad_buffer_pool,
          &outbuf, NULL) != GST_FLOW_OK)
    goto error_create_buffer;

  if (is_dma_buffer (inbuf)) {
    if (!plugin_bind_dma_to_vaapi_buffer (plugin, inbuf, outbuf))
      goto error_bind_dma_buffer;
    goto done;
  }

  if (!gst_video_frame_map (&src_frame, &plugin->sinkpad_info, inbuf,
          GST_MAP_READ))
    goto error_map_src_buffer;

  if (!gst_video_frame_map (&out_frame, &plugin->sinkpad_info, outbuf,
          GST_MAP_WRITE))
    goto error_map_dst_buffer;

  success = gst_video_frame_copy (&out_frame, &src_frame);
  gst_video_frame_unmap (&out_frame);
  gst_video_frame_unmap (&src_frame);
  if (!success)
    goto error_copy_buffer;

done:
  gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_FLAGS |
      GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
  *outbuf_ptr = outbuf;
  return GST_FLOW_OK;

  /* ERRORS */
error_no_pool:
  {
    GST_ELEMENT_ERROR (plugin, STREAM, FAILED,
        ("no buffer pool was negotiated"), ("no buffer pool was negotiated"));
    return GST_FLOW_ERROR;
  }
error_active_pool:
  {
    GST_ELEMENT_ERROR (plugin, STREAM, FAILED,
        ("failed to activate buffer pool"), ("failed to activate buffer pool"));
    return GST_FLOW_ERROR;
  }
error_map_dst_buffer:
  {
    gst_video_frame_unmap (&src_frame);
    /* fall-through */
  }
error_map_src_buffer:
  {
    GST_WARNING ("failed to map buffer");
    gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_SUPPORTED;
  }

  /* ERRORS */
error_invalid_buffer:
  {
    GST_ELEMENT_ERROR (plugin, STREAM, FAILED,
        ("failed to validate source buffer"),
        ("failed to validate source buffer"));
    return GST_FLOW_ERROR;
  }
error_create_buffer:
  {
    GST_ELEMENT_ERROR (plugin, STREAM, FAILED, ("Allocation failed"),
        ("failed to create buffer"));
    return GST_FLOW_ERROR;
  }
error_bind_dma_buffer:
  {
    GST_ELEMENT_ERROR (plugin, STREAM, FAILED, ("Allocation failed"),
        ("failed to bind dma_buf to VA surface buffer"));
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }
error_copy_buffer:
  {
    GST_WARNING ("failed to upload buffer to VA surface");
    gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
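
A possible call site for the function above, sketched under the assumption that it runs inside an element's chain function (process_input and its arguments are hypothetical, not part of the source):

static GstFlowReturn
process_input (GstVaapiPluginBase * plugin, GstBuffer * inbuf)
{
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;

  /* raw YUV input is uploaded; VA surface backed input is passed verbatim */
  ret = gst_vaapi_plugin_base_get_input_buffer (plugin, inbuf, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  /* ... hand outbuf to the VA-based processing stage ... */

  gst_buffer_unref (outbuf);
  return GST_FLOW_OK;
}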
Example #9
gboolean gst_imx_blitter_set_input_frame(GstImxBlitter *blitter, GstBuffer *frame)
{
	gboolean ret;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxBlitterClass *klass;

	g_assert(blitter != NULL);
	klass = GST_IMX_BLITTER_CLASS(G_OBJECT_GET_CLASS(blitter));
	g_assert(klass->set_input_frame != NULL);

	if (frame == NULL)
		return klass->set_input_frame(blitter, NULL);

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame);

	if ((phys_mem_meta == NULL) || (phys_mem_meta->phys_addr == 0))
	{
		GstFlowReturn flow_ret;
		GstBuffer *internal_input_frame;

		/* No DMA memory present; the input frame needs to be copied to an internal input frame */

		GST_TRACE_OBJECT(blitter, "input frame does not use DMA memory - copying input frame to internal frame");

		{
			if (blitter->dma_bufferpool == NULL)
			{
				GST_TRACE_OBJECT(blitter, "need to create internal bufferpool");

				/* DMA bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input frame */

				GstCaps *caps = gst_video_info_to_caps(&(blitter->input_video_info));

				blitter->dma_bufferpool = gst_imx_blitter_create_bufferpool(
					blitter,
					caps,
					blitter->input_video_info.size,
					0, 0,
					NULL,
					NULL
				);

				gst_caps_unref(caps);

				if (blitter->dma_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(blitter, "failed to create internal bufferpool");
					return FALSE;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(blitter->dma_bufferpool))
				gst_buffer_pool_set_active(blitter->dma_bufferpool, TRUE);
		}

		/* Create new internal input frame */
		GST_TRACE_OBJECT(blitter, "acquiring buffer for internal input frame");
		internal_input_frame = NULL;
		flow_ret = gst_buffer_pool_acquire_buffer(blitter->dma_bufferpool, &internal_input_frame, NULL);
		if (flow_ret != GST_FLOW_OK)
		{
			if (internal_input_frame != NULL)
				gst_buffer_unref(internal_input_frame);

			GST_ERROR_OBJECT(blitter, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
			return FALSE;
		}

		/* Copy the input buffer's pixels to the internal input frame */
		{
			GstVideoFrame input_vidframe, internal_input_vidframe;

			gst_video_frame_map(&input_vidframe, &(blitter->input_video_info), frame, GST_MAP_READ);
			gst_video_frame_map(&internal_input_vidframe, &(blitter->input_video_info), internal_input_frame, GST_MAP_WRITE);

			/* gst_video_frame_copy() makes sure stride and plane offset values from both frames are respected */
			gst_video_frame_copy(&internal_input_vidframe, &input_vidframe);

			/* copy interlace flags */
			GST_BUFFER_FLAGS(internal_input_frame) |= (GST_BUFFER_FLAGS(frame) & (GST_VIDEO_BUFFER_FLAG_INTERLACED | GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_RFF | GST_VIDEO_BUFFER_FLAG_ONEFIELD));

			gst_video_frame_unmap(&internal_input_vidframe);
			gst_video_frame_unmap(&input_vidframe);
		}

		ret = klass->set_input_frame(blitter, internal_input_frame);

		gst_buffer_unref(internal_input_frame);
	}
	else
	{
		GST_TRACE_OBJECT(blitter, "input frame uses DMA memory - setting it directly as input frame");
		ret = klass->set_input_frame(blitter, frame);
	}

	return ret;
}
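
Example #9's gst_imx_blitter_create_bufferpool() is project-specific, but the lazy create/activate/acquire dance around it is plain GstBufferPool usage. A sketch of the same pattern with only stock GStreamer API, using gst_video_buffer_pool_new() as a stand-in for the i.MX pool (helper name and signature are illustrative):

static GstBuffer *
acquire_pool_buffer (GstBufferPool ** pool, GstVideoInfo * info)
{
  GstBuffer *buf = NULL;

  if (*pool == NULL) {
    /* pool does not exist yet - create and configure it now */
    GstCaps *caps = gst_video_info_to_caps (info);
    GstStructure *config;

    *pool = gst_video_buffer_pool_new ();
    config = gst_buffer_pool_get_config (*pool);
    gst_buffer_pool_config_set_params (config, caps, info->size, 0, 0);
    gst_buffer_pool_set_config (*pool, config);
    gst_caps_unref (caps);
  }

  /* a pool must be active before buffers can be acquired from it */
  if (!gst_buffer_pool_is_active (*pool))
    gst_buffer_pool_set_active (*pool, TRUE);

  if (gst_buffer_pool_acquire_buffer (*pool, &buf, NULL) != GST_FLOW_OK)
    return NULL;

  return buf;
}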
Example #10
static GstFlowReturn gst_fsl_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstFslPhysMemMeta *phys_mem_meta;
	GstFslVpuBaseEncClass *klass;
	GstFslVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;

	vpu_base_enc = GST_FSL_VPU_BASE_ENC(encoder);
	klass = GST_FSL_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(frame->input_buffer);

	if (phys_mem_meta == NULL)
	{
		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_fsl_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_fsl_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_FSL_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		input_buffer = vpu_base_enc->internal_input_buffer;
		phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
		input_buffer = frame->input_buffer;

	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL;
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %d   offset 1: %d   offset 2: %d", GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);

		if (vpu_base_enc->framebuffers == NULL)
		{
			GstFslVpuFramebufferParams fbparams;
			gst_fsl_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
			fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
			fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;
			vpu_base_enc->framebuffers = gst_fsl_vpu_framebuffers_new(&fbparams, gst_fsl_vpu_enc_allocator_obtain());
			gst_fsl_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, plane_strides[0]);
		}

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			vpu_base_enc->output_phys_buffer = (GstFslPhysMemory *)gst_allocator_alloc(gst_fsl_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

			if (vpu_base_enc->output_phys_buffer == NULL)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
				return GST_FLOW_ERROR;
			}
		}
	}

	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;

	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
	if (enc_ret != VPU_ENC_RET_SUCCESS)
	{
		GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_fsl_vpu_strerror(enc_ret));
		VPU_EncReset(vpu_base_enc->handle);
		return GST_FLOW_ERROR;
	}

	GST_LOG_OBJECT(vpu_base_enc, "out ret code: 0x%x  out size: %u", enc_enc_param.eOutRetCode, enc_enc_param.nOutOutputSize);

	if ((enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS) || (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER))
	{
		gst_video_encoder_allocate_output_frame(encoder, frame, enc_enc_param.nOutOutputSize);
		gst_buffer_fill(frame->output_buffer, 0, vpu_base_enc->output_phys_buffer->mapped_virt_addr, enc_enc_param.nOutOutputSize);
		gst_video_encoder_finish_frame(encoder, frame);
	}

	return GST_FLOW_OK;
}
Example #11
static GstBuffer *
gst_kms_sink_get_input_buffer (GstKMSSink * self, GstBuffer * inbuf)
{
  GstMemory *mem;
  GstBuffer *buf;
  GstFlowReturn ret;
  GstVideoFrame inframe, outframe;
  gboolean success;

  mem = gst_buffer_peek_memory (inbuf, 0);
  if (!mem)
    return NULL;

  if (gst_is_kms_memory (mem))
    return gst_buffer_ref (inbuf);

  buf = NULL;
  if (gst_kms_sink_import_dmabuf (self, inbuf, &buf))
    return buf;

  GST_CAT_INFO_OBJECT (CAT_PERFORMANCE, self, "frame copy");

  if (!gst_buffer_pool_set_active (self->pool, TRUE))
    goto activate_pool_failed;

  ret = gst_buffer_pool_acquire_buffer (self->pool, &buf, NULL);
  if (ret != GST_FLOW_OK)
    goto create_buffer_failed;

  if (!gst_video_frame_map (&inframe, &self->vinfo, inbuf, GST_MAP_READ))
    goto error_map_src_buffer;

  if (!gst_video_frame_map (&outframe, &self->vinfo, buf, GST_MAP_WRITE))
    goto error_map_dst_buffer;

  success = gst_video_frame_copy (&outframe, &inframe);
  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);
  if (!success)
    goto error_copy_buffer;

  return buf;

bail:
  {
    if (buf)
      gst_buffer_unref (buf);
    return NULL;
  }

  /* ERRORS */
activate_pool_failed:
  {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, ("failed to activate buffer pool"),
        ("failed to activate buffer pool"));
    goto bail;
  }
create_buffer_failed:
  {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, ("allocation failed"),
        ("failed to create buffer"));
    goto bail;
  }
error_copy_buffer:
  {
    GST_WARNING_OBJECT (self, "failed to upload buffer");
    goto bail;
  }
error_map_dst_buffer:
  {
    gst_video_frame_unmap (&inframe);
    /* fall-through */
  }
error_map_src_buffer:
  {
    GST_WARNING_OBJECT (self, "failed to map buffer");
    goto bail;
  }
}
Example #12
gboolean gst_imx_ipu_blitter_set_input_buffer(GstImxIpuBlitter *ipu_blitter, GstBuffer *input_buffer)
{
	GstImxPhysMemMeta *phys_mem_meta;

	g_assert(input_buffer != NULL);

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_buffer);

	/* Test if the input buffer uses DMA memory */
	if ((phys_mem_meta != NULL) && (phys_mem_meta->phys_addr != 0))
	{
		/* DMA memory present - the input buffer can be used as an actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, gst_buffer_ref(input_buffer));

		GST_TRACE_OBJECT(ipu_blitter, "input buffer uses DMA memory - setting it as actual input buffer");
	}
	else
	{
		/* No DMA memory present; the input buffer needs to be copied to an internal
		 * temporary input buffer */

		GstBuffer *temp_input_buffer;
		GstFlowReturn flow_ret;

		GST_TRACE_OBJECT(ipu_blitter, "input buffer does not use DMA memory - need to copy it to an internal input DMA buffer");

		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			if (ipu_blitter->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstCaps *caps = gst_video_info_to_caps(&(ipu_blitter->input_video_info));

				ipu_blitter->internal_bufferpool = gst_imx_ipu_blitter_create_bufferpool(
					ipu_blitter,
					caps,
					ipu_blitter->input_video_info.size,
					2, 0,
					NULL,
					NULL
				);

				gst_caps_unref(caps);

				if (ipu_blitter->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(ipu_blitter, "failed to create internal bufferpool");
					return FALSE;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(ipu_blitter->internal_bufferpool))
				gst_buffer_pool_set_active(ipu_blitter->internal_bufferpool, TRUE);
		}

		/* Create the internal input buffer */
		flow_ret = gst_buffer_pool_acquire_buffer(ipu_blitter->internal_bufferpool, &temp_input_buffer, NULL);
		if (flow_ret != GST_FLOW_OK)
		{
			GST_ERROR_OBJECT(ipu_blitter, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
			return FALSE;
		}

		{
			GstVideoFrame input_frame, temp_input_frame;

			gst_video_frame_map(&input_frame, &(ipu_blitter->input_video_info), input_buffer, GST_MAP_READ);
			gst_video_frame_map(&temp_input_frame, &(ipu_blitter->input_video_info), temp_input_buffer, GST_MAP_WRITE);

			/* Copy the input buffer's pixels to the temp input frame
			 * The gst_video_frame_copy() makes sure stride and plane offset values from both
			 * frames are respected */
			gst_video_frame_copy(&temp_input_frame, &input_frame);

			gst_video_frame_unmap(&temp_input_frame);
			gst_video_frame_unmap(&input_frame);
		}

		/* Finally, set the temp input buffer as the actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, temp_input_buffer);
	}

	/* Configure interlacing */
	ipu_blitter->priv->task.input.deinterlace.enable = 0;
	if (ipu_blitter->deinterlace_mode != GST_IMX_IPU_BLITTER_DEINTERLACE_NONE)
	{
		switch (ipu_blitter->input_video_info.interlace_mode)
		{
			case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
				GST_TRACE_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
				ipu_blitter->priv->task.input.deinterlace.enable = 1;
				break;
			case GST_VIDEO_INTERLACE_MODE_MIXED:
			{
				GstVideoMeta *video_meta;

				GST_TRACE_OBJECT(ipu_blitter, "input stream uses mixed interlacing -> need to check video metadata deinterlacing flag");

				video_meta = gst_buffer_get_video_meta(input_buffer);
				if (video_meta != NULL)
				{
					if (video_meta->flags & GST_VIDEO_FRAME_FLAG_INTERLACED)
					{
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata and deinterlacing flag");
						ipu_blitter->priv->task.input.deinterlace.enable = 1;
					}
					else
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata but no deinterlacing flag");
				}
				else
					GST_TRACE_OBJECT(ipu_blitter, "frame has no video metadata -> no deinterlacing done");

				break;
			}
			case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
			{
				GST_TRACE_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
				break;
			}
			case GST_VIDEO_INTERLACE_MODE_FIELDS:
				GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
				break;
			default:
				break;
		}
	}

	return TRUE;
}
Example #13
static GstFlowReturn gst_imx_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuBaseEncClass *klass;
	GstImxVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;
	gint src_stride;

	vpu_base_enc = GST_IMX_VPU_BASE_ENC(encoder);
	klass = GST_IMX_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_base_enc, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				GST_DEBUG_OBJECT(vpu_base_enc, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_imx_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the internal input buffer as the encoder's input */
		input_buffer = vpu_base_enc->internal_input_buffer;
		/* And use the internal input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = frame->input_buffer;
	}

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL; /* not used by the VPU encoder */
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		/* this is needed for framebuffers registration below */
		src_stride = plane_strides[0];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %d   offset 1: %d   offset 2: %d", GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);
	}

	/* Create framebuffers structure (if not already present) */
	if (vpu_base_enc->framebuffers == NULL)
	{
		GstImxVpuFramebufferParams fbparams;
		gst_imx_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
		fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
		fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;

		vpu_base_enc->framebuffers = gst_imx_vpu_framebuffers_new(&fbparams, gst_imx_vpu_enc_allocator_obtain());
		if (vpu_base_enc->framebuffers == NULL)
		{
			GST_ELEMENT_ERROR(vpu_base_enc, RESOURCE, NO_SPACE_LEFT, ("could not create framebuffers structure"), (NULL));
			return GST_FLOW_ERROR;
		}

		gst_imx_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, src_stride);
	}

	/* Allocate physical buffer for output data (if not already present) */
	if (vpu_base_enc->output_phys_buffer == NULL)
	{
		vpu_base_enc->output_phys_buffer = (GstImxPhysMemory *)gst_allocator_alloc(gst_imx_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
			return GST_FLOW_ERROR;
		}
	}

	/* Set up encoding parameters */
	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;
	enc_enc_param.nForceIPicture = 0;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame))
	{
		enc_enc_param.nForceIPicture = 1;
		GST_LOG_OBJECT(vpu_base_enc, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	/* Main encoding block */
	{
		GstBuffer *output_buffer = NULL;
		gsize output_buffer_offset = 0;
		gboolean frame_finished = FALSE;

		frame->output_buffer = NULL;

		/* Run in a loop until the VPU reports the input as used */
		do
		{
			/* Feed input data */
			enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
			if (enc_ret != VPU_ENC_RET_SUCCESS)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_imx_vpu_strerror(enc_ret));
				VPU_EncReset(vpu_base_enc->handle);
				return GST_FLOW_ERROR;
			}

			if (frame_finished)
			{
				GST_WARNING_OBJECT(vpu_base_enc, "frame was already finished for the current input, but input not yet marked as used");
				continue;
			}

			if (enc_enc_param.eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			{
				/* Create an output buffer on demand */
				if (output_buffer == NULL)
				{
					output_buffer = gst_video_encoder_allocate_output_buffer(
						encoder,
						vpu_base_enc->output_phys_buffer->mem.size
					);
					frame->output_buffer = output_buffer;
				}

				GST_LOG_OBJECT(vpu_base_enc, "processing output data: %u bytes, output buffer offset %u", enc_enc_param.nOutOutputSize, output_buffer_offset);

				if (klass->fill_output_buffer != NULL)
				{
					/* Derived class fills data on its own */

					gsize cur_offset = output_buffer_offset;
					output_buffer_offset += klass->fill_output_buffer(
						vpu_base_enc,
						frame,
						cur_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize,
						enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER
					);
				}
				else
				{
					/* Use default data filling (= copy input to output) */

					gst_buffer_fill(
						output_buffer,
						output_buffer_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize
					);
					output_buffer_offset += enc_enc_param.nOutOutputSize;
				}

				if (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS)
				{
					g_assert(output_buffer != NULL);

					/* Set the output buffer's size to the actual number of bytes
					 * filled by the derived class */
					gst_buffer_set_size(output_buffer, output_buffer_offset);

					/* Set the frame DTS */
					frame->dts = frame->pts;

					/* And finish the frame, handing the output data over to the base class */
					gst_video_encoder_finish_frame(encoder, frame);

					output_buffer = NULL;
					frame_finished = TRUE;

					if (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED))
						GST_WARNING_OBJECT(vpu_base_enc, "frame finished, but VPU did not report the input as used");

					break;
				}
			}
		}
		while (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED)); /* VPU_ENC_INPUT_NOT_USED has value 0x0 - cannot use it for flag checks */

		/* If output_buffer is NULL at this point, it means VPU_ENC_OUTPUT_DIS was never communicated
		 * by the VPU, and the buffer is unfinished. -> Drop it. */
		if (output_buffer != NULL)
		{
			GST_WARNING_OBJECT(vpu_base_enc, "frame unfinished; dropping");
			gst_buffer_unref(output_buffer);
			frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, frame);
		}
	}

	return GST_FLOW_OK;
}
Example #14
static GstFlowReturn gst_imx_vpu_encoder_base_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *input_frame)
{
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuEncoderBaseClass *klass;
	GstImxVpuEncoderBase *vpu_encoder_base;
	GstBuffer *input_buffer;
	ImxVpuEncParams enc_params;

	vpu_encoder_base = GST_IMX_VPU_ENCODER_BASE(encoder);
	klass = GST_IMX_VPU_ENCODER_BASE_CLASS(G_OBJECT_GET_CLASS(vpu_encoder_base));

	if (vpu_encoder_base->drop)
	{
		input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
		gst_video_encoder_finish_frame(encoder, input_frame);
		return GST_FLOW_OK;
	}

	/* Get access to the input buffer's physical address */

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_encoder_base, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_encoder_base->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_encoder_base->internal_input_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;

				GST_DEBUG_OBJECT(vpu_encoder_base, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_encoder_base->video_info));
				vpu_encoder_base->internal_input_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);

				gst_object_ref(vpu_encoder_base->phys_mem_allocator);

				config = gst_buffer_pool_get_config(vpu_encoder_base->internal_input_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_encoder_base->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, vpu_encoder_base->phys_mem_allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_encoder_base->internal_input_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_encoder_base->internal_input_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_encoder_base, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_encoder_base->internal_input_bufferpool))
				gst_buffer_pool_set_active(vpu_encoder_base->internal_input_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_encoder_base->internal_input_bufferpool, &(vpu_encoder_base->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_encoder_base, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_encoder_base->video_info), input_frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_encoder_base->video_info), vpu_encoder_base->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the input buffer as the encoder's input */
		input_buffer = vpu_encoder_base->internal_input_buffer;
		/* And use the input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_encoder_base->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = input_frame->input_buffer;
	}


	/* Prepare the input buffer's information (strides, plane offsets ..) for encoding */

	{
		GstVideoMeta *video_meta;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			vpu_encoder_base->input_framebuffer.y_stride = video_meta->stride[0];
			vpu_encoder_base->input_framebuffer.cbcr_stride = video_meta->stride[1];

			vpu_encoder_base->input_framebuffer.y_offset = video_meta->offset[0];
			vpu_encoder_base->input_framebuffer.cb_offset = video_meta->offset[1];
			vpu_encoder_base->input_framebuffer.cr_offset = video_meta->offset[2];
		}
		else
		{
			vpu_encoder_base->input_framebuffer.y_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cbcr_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 1);

			vpu_encoder_base->input_framebuffer.y_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cb_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 1);
			vpu_encoder_base->input_framebuffer.cr_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 2);
		}

		vpu_encoder_base->input_framebuffer.mvcol_offset = 0; /* this is not used by the encoder */
		vpu_encoder_base->input_framebuffer.context = (void *)(guintptr)(input_frame->system_frame_number);

		vpu_encoder_base->input_dmabuffer.fd = -1;
		vpu_encoder_base->input_dmabuffer.physical_address = phys_mem_meta->phys_addr;
		vpu_encoder_base->input_dmabuffer.size = gst_buffer_get_size(input_buffer);
	}


	/* Prepare the encoding parameters */

	memset(&enc_params, 0, sizeof(enc_params));
	imx_vpu_enc_set_default_encoding_params(vpu_encoder_base->encoder, &enc_params);
	enc_params.force_I_frame = 0;
	enc_params.acquire_output_buffer = gst_imx_vpu_encoder_base_acquire_output_buffer;
	enc_params.finish_output_buffer = gst_imx_vpu_encoder_base_finish_output_buffer;
	enc_params.output_buffer_context = vpu_encoder_base;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(input_frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(input_frame))
	{
		enc_params.force_I_frame = 1;
		GST_LOG_OBJECT(vpu_encoder_base, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(input_frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if ((klass->set_frame_enc_params != NULL) && !klass->set_frame_enc_params(vpu_encoder_base, &enc_params))
	{
		GST_ERROR_OBJECT(vpu_encoder_base, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}


	/* Main encoding block */
	{
		ImxVpuEncReturnCodes enc_ret;
		unsigned int output_code = 0;
		ImxVpuEncodedFrame encoded_data_frame;

		vpu_encoder_base->output_buffer = NULL;

		/* The actual encoding call */
		memset(&encoded_data_frame, 0, sizeof(ImxVpuEncodedFrame));
		enc_ret = imx_vpu_enc_encode(vpu_encoder_base->encoder, &(vpu_encoder_base->input_frame), &encoded_data_frame, &enc_params, &output_code);
		if (enc_ret != IMX_VPU_ENC_RETURN_CODE_OK)
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "failed to encode frame: %s", imx_vpu_enc_error_string(enc_ret));
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		/* Give the derived class a chance to process the output buffer */
		if ((klass->process_output_buffer != NULL) && !klass->process_output_buffer(vpu_encoder_base, input_frame, &(vpu_encoder_base->output_buffer)))
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "derived class reports failure while processing encoded output");
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		if (output_code & IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE)
		{
			GST_LOG_OBJECT(vpu_encoder_base, "VPU outputs encoded frame");

			/* TODO: make use of the frame context that is retrieved with get_frame(i)
			 * This is not strictly necessary, since the VPU encoder does not
			 * do frame reordering, nor does it produce delays, but it would
			 * be a bit cleaner. */

			input_frame->dts = input_frame->pts;

			/* Hand over the encoded bits; output_buffer contains a complete
			 * encoded frame at this point. */
			input_frame->output_buffer = vpu_encoder_base->output_buffer;

			/* And finish the frame, handing the output data over to the base class */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
		else
		{
			/* If at this point IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE is not set
			 * in the output_code, it means the input was used up before a frame could be
			 * encoded. Therefore, no output frame can be pushed downstream. Note that this
			 * should not happen during normal operation, so a warning is logged. */

			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);

			GST_WARNING_OBJECT(vpu_encoder_base, "frame unfinished; dropping");
			input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
	}


	return GST_FLOW_OK;
}
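
Examples #10, #13 and #14 all prefer the stride/offset values from GstVideoMeta over the negotiated GstVideoInfo when the buffer carries that metadata, since the metadata can reflect the actual memory layout more accurately. That lookup can be factored into a small helper; this is a sketch with illustrative names, not code from any of the sources:

static void
get_plane_layout (GstBuffer * buf, const GstVideoInfo * info,
    gsize offsets[GST_VIDEO_MAX_PLANES], gint strides[GST_VIDEO_MAX_PLANES])
{
  /* GstVideoMeta, if attached, describes the buffer's real layout */
  GstVideoMeta *meta = gst_buffer_get_video_meta (buf);
  guint i, n_planes = GST_VIDEO_INFO_N_PLANES (info);

  for (i = 0; i < n_planes; i++) {
    offsets[i] = meta ? meta->offset[i] : GST_VIDEO_INFO_PLANE_OFFSET (info, i);
    strides[i] = meta ? meta->stride[i] : GST_VIDEO_INFO_PLANE_STRIDE (info, i);
  }
}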