Example #1
0
ImxVpuEncReturnCodes imx_vpu_jpeg_enc_encode(ImxVpuJPEGEncoder *jpeg_encoder, ImxVpuFramebuffer const *framebuffer, ImxVpuJPEGEncParams const *params, void **acquired_handle, size_t *output_buffer_size)
{
	/* Encode one raw frame as JPEG. The underlying encoder is lazily
	 * (re)created whenever the configuration in params differs from the
	 * one used for the previous call. On return, *acquired_handle holds
	 * the handle of the output buffer acquired via the params callbacks
	 * (or NULL), and *output_buffer_size the number of encoded bytes. */

	ImxVpuEncReturnCodes ret;
	ImxVpuEncParams encoding_params;
	ImxVpuRawFrame input_frame;
	ImxVpuEncodedFrame output_frame;
	unsigned int code;
	int needs_reopen;

	assert(framebuffer != NULL);
	assert(params != NULL);
	assert(jpeg_encoder != NULL);

	/* Clear the caller's handle up front so it is well-defined even if
	 * encoding fails before a buffer is acquired */
	if (acquired_handle != NULL)
		*acquired_handle = NULL;

	/* Reopen the internal encoder if it does not exist yet or any
	 * configuration value changed since the last call */
	needs_reopen = (jpeg_encoder->encoder == NULL)
	            || (jpeg_encoder->frame_width != params->frame_width)
	            || (jpeg_encoder->frame_height != params->frame_height)
	            || (jpeg_encoder->quality_factor != params->quality_factor)
	            || (jpeg_encoder->color_format != params->color_format);

	if (needs_reopen)
	{
		imx_vpu_jpeg_enc_close_internal(jpeg_encoder);

		jpeg_encoder->frame_width = params->frame_width;
		jpeg_encoder->frame_height = params->frame_height;
		jpeg_encoder->quality_factor = params->quality_factor;
		jpeg_encoder->color_format = params->color_format;

		ret = imx_vpu_jpeg_enc_open_internal(jpeg_encoder);
		if (ret != IMX_VPU_ENC_RETURN_CODE_OK)
			return ret;
	}

	/* Only the output buffer callbacks are needed; all other
	 * per-frame parameters stay zero */
	memset(&encoding_params, 0, sizeof(encoding_params));
	encoding_params.acquire_output_buffer = params->acquire_output_buffer;
	encoding_params.finish_output_buffer = params->finish_output_buffer;
	encoding_params.output_buffer_context = params->output_buffer_context;

	/* The encoder only reads from the framebuffer, so dropping the
	 * const qualifier here is safe; the API takes a non-const pointer */
	memset(&input_frame, 0, sizeof(input_frame));
	input_frame.framebuffer = (ImxVpuFramebuffer *)framebuffer;

	memset(&output_frame, 0, sizeof(output_frame));

	ret = imx_vpu_enc_encode(jpeg_encoder->encoder, &input_frame, &output_frame, &encoding_params, &code);

	/* Hand the acquired output buffer handle and its fill level back
	 * to the caller (both are zero/NULL if encoding did not produce
	 * output, thanks to the memset above) */
	if (acquired_handle != NULL)
		*acquired_handle = output_frame.acquired_handle;
	if (output_buffer_size != NULL)
		*output_buffer_size = output_frame.data_size;

	return ret;
}
Retval run(Context *ctx)
{
	/* Encode raw i420 frames from ctx->fin and write the encoded
	 * frames to ctx->fout until the input file is exhausted. The
	 * encoder writes its output directly into ctx->output_dmabuffer. */

	ImxVpuPicture input_picture;
	ImxVpuEncodedFrame output_frame;
	ImxVpuEncParams enc_params;
	unsigned int output_code;

	/* The input picture only needs its framebuffer set; the encoder
	 * reads the raw pixels from there */
	memset(&input_picture, 0, sizeof(input_picture));
	input_picture.framebuffer = &(ctx->input_framebuffer);

	/* Per-frame encoding parameters; quant_param 0 selects the best
	 * quality in constant quality mode */
	memset(&enc_params, 0, sizeof(enc_params));
	enc_params.frame_width = FRAME_WIDTH;
	enc_params.frame_height = FRAME_HEIGHT;
	enc_params.framerate = FPS;
	enc_params.quant_param = 0;

	/* The encoder fills this DMA buffer with the encoded data */
	memset(&output_frame, 0, sizeof(output_frame));
	output_frame.data.dma_buffer = ctx->output_dmabuffer;

	/* Read input i420 frames and encode them until the end of the input file is reached */
	for (;;)
	{
		size_t num_read;
		uint8_t *mapped_virtual_address;

		/* Read uncompressed pixels into the input DMA buffer */
		mapped_virtual_address = imx_vpu_dma_buffer_map(ctx->input_fb_dmabuffer, IMX_VPU_MAPPING_FLAG_WRITE_ONLY);
		num_read = fread(mapped_virtual_address, 1, FRAME_SIZE, ctx->fin);
		imx_vpu_dma_buffer_unmap(ctx->input_fb_dmabuffer);

		/* Stop encoding if a full frame could not be read. Checking the
		 * fread byte count instead of only feof() also terminates the
		 * loop on a read error (previously this could loop forever,
		 * since ferror() never sets the EOF indicator). A trailing
		 * partial frame is discarded, as before. */
		if (num_read < FRAME_SIZE)
			break;

		/* The actual encoding */
		imx_vpu_enc_encode(ctx->vpuenc, &input_picture, &output_frame, &enc_params, &output_code);

		/* Write out the encoded frame to the output file */
		mapped_virtual_address = imx_vpu_dma_buffer_map(ctx->output_dmabuffer, IMX_VPU_MAPPING_FLAG_READ_ONLY);
		fwrite(mapped_virtual_address, 1, output_frame.data_size, ctx->fout);
		/* BUGFIX: unmap the output DMA buffer that was just mapped above.
		 * The original unmapped ctx->input_fb_dmabuffer here, which was
		 * already unmapped, leaking the output mapping on every frame. */
		imx_vpu_dma_buffer_unmap(ctx->output_dmabuffer);
	}

	return RETVAL_OK;
}
Retval run(Context *ctx)
{
	unsigned int code;
	ImxVpuEncParams encoding_params;
	ImxVpuRawFrame raw_frame;
	ImxVpuEncodedFrame encoded_frame;

	/* Per-frame encoding parameters. quant_param 0 picks the best
	 * quality in h.264 constant quality mode; the valid h.264 range
	 * is 0-51, where lower means better quality and worse compression.
	 * The acquire/finish callbacks provide the memory block that
	 * receives the encoded data; no extra context is needed here. */
	memset(&encoding_params, 0, sizeof(encoding_params));
	encoding_params.quant_param = 0;
	encoding_params.acquire_output_buffer = acquire_output_buffer;
	encoding_params.finish_output_buffer = finish_output_buffer;
	encoding_params.output_buffer_context = NULL;

	/* The raw input frame only needs the framebuffer the encoder shall
	 * read pixels from; every other field can stay zero/NULL. */
	memset(&raw_frame, 0, sizeof(raw_frame));
	raw_frame.framebuffer = &(ctx->input_framebuffer);

	/* The output frame is filled in by the encoder; zeroing it is all
	 * the setup it requires. */
	memset(&encoded_frame, 0, sizeof(encoded_frame));

	/* Encode i420 frames from the input file until it is exhausted */
	while (1)
	{
		void *encoded_data;
		uint8_t *virt_addr;

		/* Fill the input DMA buffer with the next raw frame */
		virt_addr = imx_vpu_dma_buffer_map(ctx->input_fb_dmabuffer, IMX_VPU_MAPPING_FLAG_WRITE);
		fread(virt_addr, 1, FRAME_SIZE, ctx->fin);
		imx_vpu_dma_buffer_unmap(ctx->input_fb_dmabuffer);

		/* Done once the end of the input file has been hit */
		if (feof(ctx->fin))
			break;

		/* Encode the frame */
		imx_vpu_enc_encode(ctx->vpuenc, &raw_frame, &encoded_frame, &encoding_params, &code);

		/* In this example, acquire_output_buffer() simply malloc's a
		 * block and uses the pointer itself as the "handle", so
		 * acquired_handle points directly at the encoded bytes. Write
		 * them to the output file, then free the allocation. A
		 * production acquire function could instead hand out blocks
		 * from a buffer pool. */
		encoded_data = encoded_frame.acquired_handle;
		fwrite(encoded_data, 1, encoded_frame.data_size, ctx->fout);
		free(encoded_data);
	}

	return RETVAL_OK;
}
Example #4
0
/* GstVideoEncoder handle_frame vfunc: feeds one raw input frame into the
 * i.MX VPU encoder and hands the encoded result to the base class. Frames
 * whose buffers are not physically contiguous are first copied into an
 * internal DMA-capable buffer, since the VPU reads via physical addresses.
 * Returns GST_FLOW_OK on success, or an error flow return otherwise. */
static GstFlowReturn gst_imx_vpu_encoder_base_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *input_frame)
{
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuEncoderBaseClass *klass;
	GstImxVpuEncoderBase *vpu_encoder_base;
	GstBuffer *input_buffer;
	ImxVpuEncParams enc_params;

	vpu_encoder_base = GST_IMX_VPU_ENCODER_BASE(encoder);
	klass = GST_IMX_VPU_ENCODER_BASE_CLASS(G_OBJECT_GET_CLASS(vpu_encoder_base));

	if (vpu_encoder_base->drop)
	{
		input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
		gst_video_encoder_finish_frame(encoder, input_frame);
		return GST_FLOW_OK;
	}

	/* Get access to the input buffer's physical address */

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_encoder_base, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_encoder_base->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_encoder_base->internal_input_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;

				GST_DEBUG_OBJECT(vpu_encoder_base, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_encoder_base->video_info));
				vpu_encoder_base->internal_input_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);

				gst_object_ref(vpu_encoder_base->phys_mem_allocator);

				config = gst_buffer_pool_get_config(vpu_encoder_base->internal_input_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_encoder_base->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, vpu_encoder_base->phys_mem_allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_encoder_base->internal_input_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_encoder_base->internal_input_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_encoder_base, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_encoder_base->internal_input_bufferpool))
				gst_buffer_pool_set_active(vpu_encoder_base->internal_input_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_encoder_base->internal_input_bufferpool, &(vpu_encoder_base->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				/* BUGFIX: flow_ret is a GstFlowReturn, so it must be stringified
				 * with gst_flow_get_name(), not gst_pad_mode_get_name() (which
				 * expects a GstPadMode and would print garbage here) */
				GST_ERROR_OBJECT(vpu_encoder_base, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_encoder_base->video_info), input_frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_encoder_base->video_info), vpu_encoder_base->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the input buffer as the encoder's input */
		input_buffer = vpu_encoder_base->internal_input_buffer;
		/* And use the input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_encoder_base->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = input_frame->input_buffer;
	}


	/* Prepare the input buffer's information (strides, plane offsets ..) for encoding */

	{
		GstVideoMeta *video_meta;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			vpu_encoder_base->input_framebuffer.y_stride = video_meta->stride[0];
			vpu_encoder_base->input_framebuffer.cbcr_stride = video_meta->stride[1];

			vpu_encoder_base->input_framebuffer.y_offset = video_meta->offset[0];
			vpu_encoder_base->input_framebuffer.cb_offset = video_meta->offset[1];
			vpu_encoder_base->input_framebuffer.cr_offset = video_meta->offset[2];
		}
		else
		{
			vpu_encoder_base->input_framebuffer.y_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cbcr_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 1);

			vpu_encoder_base->input_framebuffer.y_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cb_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 1);
			vpu_encoder_base->input_framebuffer.cr_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 2);
		}

		vpu_encoder_base->input_framebuffer.mvcol_offset = 0; /* this is not used by the encoder */
		/* The system frame number is smuggled through the framebuffer context
		 * pointer so the encoded output can be matched to its source frame */
		vpu_encoder_base->input_framebuffer.context = (void *)(input_frame->system_frame_number);

		vpu_encoder_base->input_dmabuffer.fd = -1;
		vpu_encoder_base->input_dmabuffer.physical_address = phys_mem_meta->phys_addr;
		vpu_encoder_base->input_dmabuffer.size = gst_buffer_get_size(input_buffer);
	}


	/* Prepare the encoding parameters */

	memset(&enc_params, 0, sizeof(enc_params));
	imx_vpu_enc_set_default_encoding_params(vpu_encoder_base->encoder, &enc_params);
	enc_params.force_I_frame = 0;
	enc_params.acquire_output_buffer = gst_imx_vpu_encoder_base_acquire_output_buffer;
	enc_params.finish_output_buffer = gst_imx_vpu_encoder_base_finish_output_buffer;
	enc_params.output_buffer_context = vpu_encoder_base;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(input_frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(input_frame))
	{
		enc_params.force_I_frame = 1;
		GST_LOG_OBJECT(vpu_encoder_base, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(input_frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if ((klass->set_frame_enc_params != NULL) && !klass->set_frame_enc_params(vpu_encoder_base, &enc_params))
	{
		/* BUGFIX: error message was missing its verb ("could not frame enc params") */
		GST_ERROR_OBJECT(vpu_encoder_base, "derived class could not set frame enc params");
		return GST_FLOW_ERROR;
	}


	/* Main encoding block */
	{
		ImxVpuEncReturnCodes enc_ret;
		unsigned int output_code = 0;
		ImxVpuEncodedFrame encoded_data_frame;

		vpu_encoder_base->output_buffer = NULL;

		/* The actual encoding call */
		memset(&encoded_data_frame, 0, sizeof(ImxVpuEncodedFrame));
		enc_ret = imx_vpu_enc_encode(vpu_encoder_base->encoder, &(vpu_encoder_base->input_frame), &encoded_data_frame, &enc_params, &output_code);
		if (enc_ret != IMX_VPU_ENC_RETURN_CODE_OK)
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "failed to encode frame: %s", imx_vpu_enc_error_string(enc_ret));
			/* The acquire callback may already have created an output buffer;
			 * release it to avoid leaking it on the error path */
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		/* Give the derived class a chance to process the output_block_buffer */
		if ((klass->process_output_buffer != NULL) && !klass->process_output_buffer(vpu_encoder_base, input_frame, &(vpu_encoder_base->output_buffer)))
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "derived class reports failure while processing encoded output");
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		if (output_code & IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE)
		{
			GST_LOG_OBJECT(vpu_encoder_base, "VPU outputs encoded frame");

			/* TODO: make use of the frame context that is retrieved with get_frame(i)
			 * This is not strictly necessary, since the VPU encoder does not
			 * do frame reordering, nor does it produce delays, but it would
			 * be a bit cleaner. */

			input_frame->dts = input_frame->pts;

			/* Take all of the encoded bits. The adapter contains an encoded frame
			 * at this point. */
			input_frame->output_buffer = vpu_encoder_base->output_buffer;

			/* And finish the frame, handing the output data over to the base class */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
		else
		{
			/* If at this point IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE is not set
			 * in the output_code, it means the input was used up before a frame could be
			 * encoded. Therefore, no output frame can be pushed downstream. Note that this
			 * should not happen during normal operation, so a warning is logged. */

			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);

			GST_WARNING_OBJECT(vpu_encoder_base, "frame unfinished ; dropping");
			input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
	}


	return GST_FLOW_OK;
}