Code example #1
File: vpuenc.c  Project: lenver/usbvideo
/* Encode one input frame. paddr is the physical address of the filled
 * input frame; on success *ppout points to the encoded bitstream.
 * Returns the encoded size in bytes, 0 if no output was produced for
 * this call, or -1 on error. */
int enc_stream(unsigned int paddr, unsigned char **ppout)
{
	VpuEncRetCode res;

	gEncParam.nInPhyInput = paddr;

	res = VPU_EncEncodeFrame(gHandle, &gEncParam);
	if(VPU_ENC_RET_SUCCESS != res) {
		if(VPU_ENC_RET_FAILURE_TIMEOUT == res) {
			printf("VPU_ENC register frame buffer error : %d\n", res);
			// VPU_EncReset(gHandle);
		}
		return -1;
	}

	*ppout = (unsigned char *)gEncParam.nInVirtOutput;

	// printf("VPU_ENC encode stream res = 0x%08x\n", gEncParam.eOutRetCode);

	/* VPU_ENC_OUTPUT_DIS: a fully encoded frame is available;
	 * VPU_ENC_OUTPUT_SEQHEADER: the output contains a sequence header.
	 * In both cases nOutOutputSize bytes of output are valid. */
	if((gEncParam.eOutRetCode & VPU_ENC_OUTPUT_DIS) ||
	   (gEncParam.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER))
	{
		return gEncParam.nOutOutputSize;
	}

	return 0;
}
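
For context, a caller would drive enc_stream() from a capture loop: pass in the physical address of a filled input frame, and write out however many bytes the encoder returns. A minimal sketch, assuming a hypothetical capture_frame() helper that yields the physical address of the next camera frame (encode_loop() and capture_frame() are not part of the project above):

#include <stdio.h>

extern int enc_stream(unsigned int paddr, unsigned char **ppout);
extern unsigned int capture_frame(void); /* hypothetical capture helper */

void encode_loop(FILE *out)
{
	unsigned char *stream;
	int size;

	for (;;) {
		/* physical address of the next filled input frame */
		unsigned int paddr = capture_frame();

		size = enc_stream(paddr, &stream);
		if (size < 0)
			break;                        /* encoder error */
		if (size > 0)                     /* frame or sequence header produced */
			fwrite(stream, 1, size, out);
	}
}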
Code example #2
File: base_enc.c  Project: Radbug/gst-fsl
static GstFlowReturn gst_fsl_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstFslPhysMemMeta *phys_mem_meta;
	GstFslVpuBaseEncClass *klass;
	GstFslVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;

	vpu_base_enc = GST_FSL_VPU_BASE_ENC(encoder);
	klass = GST_FSL_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(frame->input_buffer);

	if (phys_mem_meta == NULL)
	{
		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_fsl_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_fsl_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_FSL_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		input_buffer = vpu_base_enc->internal_input_buffer;
		phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
		input_buffer = frame->input_buffer;

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Prefer plane offset and stride information from the video
		 * metadata if present, since it can be more accurate than the
		 * values in the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL;
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %" G_GSIZE_FORMAT "   offset 1: %" G_GSIZE_FORMAT "   offset 2: %" G_GSIZE_FORMAT, GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);

		if (vpu_base_enc->framebuffers == NULL)
		{
			GstFslVpuFramebufferParams fbparams;
			gst_fsl_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
			fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
			fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;
			vpu_base_enc->framebuffers = gst_fsl_vpu_framebuffers_new(&fbparams, gst_fsl_vpu_enc_allocator_obtain());
			if (vpu_base_enc->framebuffers == NULL)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "could not create framebuffers structure");
				return GST_FLOW_ERROR;
			}
			gst_fsl_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, plane_strides[0]);
		}

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			vpu_base_enc->output_phys_buffer = (GstFslPhysMemory *)gst_allocator_alloc(gst_fsl_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

			if (vpu_base_enc->output_phys_buffer == NULL)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
				return GST_FLOW_ERROR;
			}
		}
	}

	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;

	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
	if (enc_ret != VPU_ENC_RET_SUCCESS)
	{
		GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_fsl_vpu_strerror(enc_ret));
		VPU_EncReset(vpu_base_enc->handle);
		return GST_FLOW_ERROR;
	}

	GST_LOG_OBJECT(vpu_base_enc, "out ret code: 0x%x  out size: %u", enc_enc_param.eOutRetCode, enc_enc_param.nOutOutputSize);

	if ((enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS) || (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER))
	{
		if (gst_video_encoder_allocate_output_frame(encoder, frame, enc_enc_param.nOutOutputSize) != GST_FLOW_OK)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate output frame");
			return GST_FLOW_ERROR;
		}
		gst_buffer_fill(frame->output_buffer, 0, vpu_base_enc->output_phys_buffer->mapped_virt_addr, enc_enc_param.nOutOutputSize);
		gst_video_encoder_finish_frame(encoder, frame);
	}

	return GST_FLOW_OK;
}
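
The set_frame_enc_params vfunc asserted at the top is where a codec-specific subclass fills in the remaining encoding parameters. As a rough sketch of what such a hook might look like for h.264 (illustrative only: the function name and the fixed quantizer value are made up, the vfunc signature is inferred from the call site above, and the struct fields come from the VPU wrapper API already used in this example):

/* Illustrative subclass vfunc - not taken from the project above.
 * eFormat selects the codec; nQuantParam is a hypothetical fixed QP. */
static gboolean hypothetical_h264_set_frame_enc_params(GstFslVpuBaseEnc *vpu_base_enc, VpuEncEncParam *enc_enc_param, VpuEncOpenParam *open_param)
{
	enc_enc_param->eFormat = VPU_V_AVC;
	enc_enc_param->nQuantParam = 10; /* hypothetical fixed quantizer */
	return TRUE;
}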
Code example #3
File: base_enc.c  Project: chamois94/gstreamer-imx
static GstFlowReturn gst_imx_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuBaseEncClass *klass;
	GstImxVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;
	gint src_stride;

	vpu_base_enc = GST_IMX_VPU_BASE_ENC(encoder);
	klass = GST_IMX_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_base_enc, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				GST_DEBUG_OBJECT(vpu_base_enc, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_imx_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the internal input buffer as the encoder's input */
		input_buffer = vpu_base_enc->internal_input_buffer;
		/* And use the internal input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = frame->input_buffer;
	}

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL; /* not used by the VPU encoder */
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		/* this is needed for framebuffers registration below */
		src_stride = plane_strides[0];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %" G_GSIZE_FORMAT "   offset 1: %" G_GSIZE_FORMAT "   offset 2: %" G_GSIZE_FORMAT, GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);
	}

	/* Create framebuffers structure (if not already present) */
	if (vpu_base_enc->framebuffers == NULL)
	{
		GstImxVpuFramebufferParams fbparams;
		gst_imx_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
		fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
		fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;

		vpu_base_enc->framebuffers = gst_imx_vpu_framebuffers_new(&fbparams, gst_imx_vpu_enc_allocator_obtain());
		if (vpu_base_enc->framebuffers == NULL)
		{
			GST_ELEMENT_ERROR(vpu_base_enc, RESOURCE, NO_SPACE_LEFT, ("could not create framebuffers structure"), (NULL));
			return GST_FLOW_ERROR;
		}

		gst_imx_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, src_stride);
	}

	/* Allocate physical buffer for output data (if not already present) */
	if (vpu_base_enc->output_phys_buffer == NULL)
	{
		vpu_base_enc->output_phys_buffer = (GstImxPhysMemory *)gst_allocator_alloc(gst_imx_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
			return GST_FLOW_ERROR;
		}
	}

	/* Set up encoding parameters */
	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;
	enc_enc_param.nForceIPicture = 0;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame))
	{
		enc_enc_param.nForceIPicture = 1;
		GST_LOG_OBJECT(vpu_base_enc, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	/* Main encoding block */
	{
		GstBuffer *output_buffer = NULL;
		gsize output_buffer_offset = 0;
		gboolean frame_finished = FALSE;

		frame->output_buffer = NULL;

		/* Run in a loop until the VPU reports the input as used */
		do
		{
			/* Feed input data */
			enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
			if (enc_ret != VPU_ENC_RET_SUCCESS)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_imx_vpu_strerror(enc_ret));
				VPU_EncReset(vpu_base_enc->handle);
				return GST_FLOW_ERROR;
			}

			if (frame_finished)
			{
				GST_WARNING_OBJECT(vpu_base_enc, "frame was already finished for the current input, but input not yet marked as used");
				continue;
			}

			if (enc_enc_param.eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			{
				/* Create an output buffer on demand */
				if (output_buffer == NULL)
				{
					output_buffer = gst_video_encoder_allocate_output_buffer(
						encoder,
						vpu_base_enc->output_phys_buffer->mem.size
					);
					frame->output_buffer = output_buffer;
				}

				GST_LOG_OBJECT(vpu_base_enc, "processing output data: %u bytes, output buffer offset %" G_GSIZE_FORMAT, enc_enc_param.nOutOutputSize, output_buffer_offset);

				if (klass->fill_output_buffer != NULL)
				{
					/* Derived class fills data on its own */

					gsize cur_offset = output_buffer_offset;
					output_buffer_offset += klass->fill_output_buffer(
						vpu_base_enc,
						frame,
						cur_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize,
						enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER
					);
				}
				else
				{
					/* Use default data filling (= copy input to output) */

					gst_buffer_fill(
						output_buffer,
						output_buffer_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize
					);
					output_buffer_offset += enc_enc_param.nOutOutputSize;
				}

				if (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS)
				{
					g_assert(output_buffer != NULL);

					/* Set the output buffer's size to the actual number of bytes
					 * filled by the derived class */
					gst_buffer_set_size(output_buffer, output_buffer_offset);

					/* Set the frame DTS */
					frame->dts = frame->pts;

					/* And finish the frame, handing the output data over to the base class */
					gst_video_encoder_finish_frame(encoder, frame);

					output_buffer = NULL;
					frame_finished = TRUE;

					if (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED))
						GST_WARNING_OBJECT(vpu_base_enc, "frame finished, but VPU did not report the input as used");

					break;
				}
			}
		}
		while (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED)); /* VPU_ENC_INPUT_NOT_USED has value 0x0 - cannot use it for flag checks */

		/* If output_buffer is still non-NULL at this point, it means VPU_ENC_OUTPUT_DIS was never
		 * communicated by the VPU, and the buffer is unfinished. -> Drop it. */
		if (output_buffer != NULL)
		{
			GST_WARNING_OBJECT(vpu_base_enc, "frame unfinished ; dropping");
			gst_buffer_unref(output_buffer);
			frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, frame);
		}
	}

	return GST_FLOW_OK;
}
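
The do/while drain logic is the core of this third example: keep calling VPU_EncEncodeFrame() until the VPU flags the input frame as consumed, collecting output whenever a DIS or SEQHEADER flag appears. Stripped of the GStreamer plumbing, the pattern reduces to roughly the following sketch, assuming the types and constants from the VPU wrapper header used above; emit_output() is a placeholder for the buffer-filling logic, and VpuEncHandle is assumed to be the wrapper's encoder handle type:

#include "vpu_wrapper.h"

/* Distilled drain loop; emit_output() stands in for the GstBuffer
 * filling code of the example above. Returns 0 on success, -1 on error. */
static int encode_one_frame(VpuEncHandle handle, VpuEncEncParam *param,
                            void (*emit_output)(void const *data, unsigned int size))
{
	do {
		/* Feed the input frame; the VPU may need several calls
		 * before it reports the input as used */
		if (VPU_EncEncodeFrame(handle, param) != VPU_ENC_RET_SUCCESS) {
			VPU_EncReset(handle);
			return -1;
		}

		/* Collect any output the VPU produced in this iteration */
		if (param->eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			emit_output((void const *)(param->nInVirtOutput), param->nOutOutputSize);
	}
	/* VPU_ENC_INPUT_NOT_USED has value 0x0, so the only flag that
	 * can be tested is VPU_ENC_INPUT_USED */
	while (!(param->eOutRetCode & VPU_ENC_INPUT_USED));

	return 0;
}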