static GstFlowReturn gst_imx_blitter_video_transform_transform_frame(GstBaseTransform *transform, GstBuffer *in, GstBuffer *out)
{
	gboolean ret;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);

	g_assert(blitter_video_transform->blitter != NULL);

	if (!blitter_video_transform->inout_info_set)
	{
		GST_ELEMENT_ERROR(transform, CORE, NOT_IMPLEMENTED, (NULL), ("unknown format"));
		return GST_FLOW_NOT_NEGOTIATED;
	}

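	/* In passthrough mode, prepare_output_buffer() (see below) hands the
	 * input buffer through as the output buffer, so there is nothing to
	 * blit here */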
	if (in == out)
	{
		GST_LOG_OBJECT(transform, "passing buffer through");
		return GST_FLOW_OK;
	}

	GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

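	/* Hand the input and output buffers to the blitter and perform the
	 * blit; if any of these steps fails, the frame is reported as an error */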
	ret = TRUE;
	ret = ret && gst_imx_base_blitter_set_input_buffer(blitter_video_transform->blitter, in);
	ret = ret && gst_imx_base_blitter_set_output_buffer(blitter_video_transform->blitter, out);
	ret = ret && gst_imx_base_blitter_blit(blitter_video_transform->blitter);

	GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

	return ret ? GST_FLOW_OK : GST_FLOW_ERROR;
}
static gboolean gst_imx_blitter_video_transform_sink_event(GstBaseTransform *transform, GstEvent *event)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);

	switch (GST_EVENT_TYPE(event))
	{
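		/* When flushing stops, flush the blitter as well so it does not
		 * keep any state from before the flush */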
		case GST_EVENT_FLUSH_STOP:
		{
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);
			if (blitter_video_transform->blitter != NULL)
			{
				if (!gst_imx_base_blitter_flush(blitter_video_transform->blitter))
					GST_WARNING_OBJECT(transform, "could not flush blitter");
			}
			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

			break;
		}

		default:
			break;
	}

	return GST_BASE_TRANSFORM_CLASS(gst_imx_blitter_video_transform_parent_class)->sink_event(transform, event);
}
static void gst_imx_blitter_video_transform_finalize(GObject *object)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(object);

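	/* Release the mutex that guards the element's internal state */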
	g_mutex_clear(&(blitter_video_transform->mutex));

	G_OBJECT_CLASS(gst_imx_blitter_video_transform_parent_class)->finalize(object);
}
static gboolean gst_imx_blitter_video_transform_set_caps(GstBaseTransform *transform, GstCaps *in, GstCaps *out)
{
	gboolean inout_info_equal;
	GstVideoInfo in_info, out_info;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(transform));
	GstImxCanvas *canvas = &(blitter_video_transform->canvas);
	GstImxRegion source_subset;

	g_assert(klass->are_video_infos_equal != NULL);
	g_assert(blitter_video_transform->blitter != NULL);

	if (!gst_video_info_from_caps(&in_info, in) || !gst_video_info_from_caps(&out_info, out))
	{
		GST_ERROR_OBJECT(transform, "caps are invalid");
		blitter_video_transform->inout_info_set = FALSE;
		return FALSE;
	}

	inout_info_equal = klass->are_video_infos_equal(blitter_video_transform, &in_info, &out_info);

	if (inout_info_equal)
		GST_DEBUG_OBJECT(transform, "input and output caps are equal");
	else
		GST_DEBUG_OBJECT(transform, "input and output caps are not equal:  input: %" GST_PTR_FORMAT "  output: %" GST_PTR_FORMAT, (gpointer)in, (gpointer)out);

	gst_imx_blitter_set_input_video_info(blitter_video_transform->blitter, &in_info);
	gst_imx_blitter_set_output_video_info(blitter_video_transform->blitter, &out_info);

	/* Setting new caps changes the canvas, so recalculate it.
	 * The recalculation here is done without any input cropping, so set
	 * last_frame_with_cropdata to FALSE, in case subsequent frames do
	 * contain crop metadata */

	blitter_video_transform->last_frame_with_cropdata = FALSE;

	/* the canvas always encompasses the entire output frame */
	canvas->outer_region.x1 = 0;
	canvas->outer_region.y1 = 0;
	canvas->outer_region.x2 = GST_VIDEO_INFO_WIDTH(&out_info);
	canvas->outer_region.y2 = GST_VIDEO_INFO_HEIGHT(&out_info);

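	/* Compute the inner region of the canvas for the input frame and clip
	 * it against the outer region; source_subset then describes the part
	 * of the input frame that is actually visible in the output */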
	gst_imx_canvas_calculate_inner_region(canvas, &in_info);
	gst_imx_canvas_clip(canvas, &(canvas->outer_region), &in_info, NULL, &source_subset);

	gst_imx_blitter_set_input_region(blitter_video_transform->blitter, &source_subset);
	gst_imx_blitter_set_output_canvas(blitter_video_transform->blitter, canvas);

	blitter_video_transform->input_video_info = in_info;
	blitter_video_transform->output_video_info = out_info;
	blitter_video_transform->inout_info_equal = inout_info_equal;
	blitter_video_transform->inout_info_set = TRUE;

	return TRUE;
}
static gboolean gst_imx_blitter_video_transform_src_event(GstBaseTransform *transform, GstEvent *event)
{
	gdouble a;
	GstStructure *structure;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);

	GST_DEBUG_OBJECT(transform, "handling %s event", GST_EVENT_TYPE_NAME(event));

	switch (GST_EVENT_TYPE(event))
	{
		case GST_EVENT_NAVIGATION:
		{
			/* Scale the pointer_x/y values in the event if the input and
			 * output frames have different width/height */

			gint in_w = GST_VIDEO_INFO_WIDTH(&(blitter_video_transform->input_video_info));
			gint in_h = GST_VIDEO_INFO_HEIGHT(&(blitter_video_transform->input_video_info));
			gint out_w = GST_VIDEO_INFO_WIDTH(&(blitter_video_transform->output_video_info));
			gint out_h = GST_VIDEO_INFO_HEIGHT(&(blitter_video_transform->output_video_info));
			if ((in_w != out_w) || (in_h != out_h))
			{
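				/* Events are refcounted and potentially shared; make the event
				 * writable before modifying the coordinates in its structure */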
				event = GST_EVENT(gst_mini_object_make_writable(GST_MINI_OBJECT(event)));

				structure = (GstStructure *)gst_event_get_structure(event);
				if (gst_structure_get_double(structure, "pointer_x", &a))
				{
					gst_structure_set(
						structure,
						"pointer_x",
						G_TYPE_DOUBLE,
						a * in_w / out_w,
						NULL
					);
				}
				if (gst_structure_get_double(structure, "pointer_y", &a))
				{
					gst_structure_set(
						structure,
						"pointer_y",
						G_TYPE_DOUBLE,
						a * in_h / out_h,
						NULL
					);
				}
			}
			break;
		}

		default:
			break;
	}

	return GST_BASE_TRANSFORM_CLASS(gst_imx_blitter_video_transform_parent_class)->src_event(transform, event);
}
static void gst_imx_blitter_video_transform_get_property(GObject *object, guint prop_id, GValue *value, GParamSpec *pspec)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(object);

	switch (prop_id)
	{
		case PROP_INPUT_CROP:
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);
			g_value_set_boolean(value, blitter_video_transform->input_crop);
			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}
static gboolean gst_imx_blitter_video_transform_set_caps(GstBaseTransform *transform, GstCaps *in, GstCaps *out)
{
	gboolean inout_info_equal;
	GstVideoInfo in_info, out_info;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(transform));

	g_assert(klass->are_video_infos_equal != NULL);
	g_assert(blitter_video_transform->blitter != NULL);

	if (!gst_video_info_from_caps(&in_info, in) || !gst_video_info_from_caps(&out_info, out))
	{
		GST_ERROR_OBJECT(transform, "caps are invalid");
		blitter_video_transform->inout_info_set = FALSE;
		return FALSE;
	}

	inout_info_equal = klass->are_video_infos_equal(blitter_video_transform, &in_info, &out_info);

	if (inout_info_equal)
		GST_DEBUG_OBJECT(transform, "input and output caps are equal");
	else
		GST_DEBUG_OBJECT(transform, "input and output caps are not equal:  input: %" GST_PTR_FORMAT "  output: %" GST_PTR_FORMAT, (gpointer)in, (gpointer)out);

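	/* Pass the new input video info to the blitter; this fails if the
	 * blitter cannot handle the input format */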
	if (!gst_imx_base_blitter_set_input_video_info(blitter_video_transform->blitter, &in_info))
	{
		GST_ERROR_OBJECT(transform, "could not use input caps: %" GST_PTR_FORMAT, (gpointer)in);
		blitter_video_transform->inout_info_set = FALSE;
		return FALSE;
	}

	blitter_video_transform->input_video_info = in_info;
	blitter_video_transform->output_video_info = out_info;
	blitter_video_transform->inout_info_equal = inout_info_equal;
	blitter_video_transform->inout_info_set = TRUE;

	return TRUE;
}
static GstFlowReturn gst_imx_blitter_video_transform_prepare_output_buffer(GstBaseTransform *transform, GstBuffer *input, GstBuffer **outbuf)
{
	gboolean passthrough = FALSE;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(transform));

	g_assert(klass->are_transforms_necessary != NULL);

	GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

	/* Test if passthrough should be enabled */
	if ((input != NULL) && blitter_video_transform->inout_info_equal)
	{
		/* if there is an input buffer and the input/output caps are equal,
		 * assume passthrough should be used, and test for exceptions where
		 * passthrough must not be enabled; such exceptions are transforms
		 * like rotation or deinterlacing, and are defined by the derived
		 * video transform class */
		passthrough = !(klass->are_transforms_necessary(blitter_video_transform, input));
	}
	else if (!blitter_video_transform->inout_info_equal)
		GST_LOG_OBJECT(transform, "input and output caps are not equal");
	else if (input == NULL)
		GST_LOG_OBJECT(transform, "input buffer is NULL");

	GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

	GST_LOG_OBJECT(transform, "passthrough: %s", passthrough ? "yes" : "no");

	if (passthrough)
	{
		*outbuf = input;
		return GST_FLOW_OK;
	}
	else
		return GST_BASE_TRANSFORM_CLASS(gst_imx_blitter_video_transform_parent_class)->prepare_output_buffer(transform, input, outbuf);
}
static GstStateChangeReturn gst_imx_blitter_video_transform_change_state(GstElement *element, GstStateChange transition)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(element);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(element));
	GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

	g_assert(blitter_video_transform != NULL);
	g_assert(klass->start != NULL);

	switch (transition)
	{
		case GST_STATE_CHANGE_NULL_TO_READY:
		{
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

			blitter_video_transform->initialized = TRUE;

			if (!(klass->start(blitter_video_transform)))
			{
				GST_ERROR_OBJECT(blitter_video_transform, "start() failed");
				blitter_video_transform->initialized = FALSE;
				GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);
				return GST_STATE_CHANGE_FAILURE;
			}

			/* start() must call gst_imx_blitter_video_transform_set_blitter(),
			 * otherwise the video transform element cannot function properly */
			g_assert(blitter_video_transform->blitter != NULL);

			gst_imx_base_blitter_enable_crop(blitter_video_transform->blitter, blitter_video_transform->input_crop);

			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

			break;
		}

		default:
			break;
	}

	ret = GST_ELEMENT_CLASS(gst_imx_blitter_video_transform_parent_class)->change_state(element, transition);
	if (ret == GST_STATE_CHANGE_FAILURE)
		return ret;

	switch (transition)
	{
		case GST_STATE_CHANGE_READY_TO_NULL:
		{
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

			blitter_video_transform->initialized = FALSE;

			if ((klass->stop != NULL) && !(klass->stop(blitter_video_transform)))
				GST_ERROR_OBJECT(blitter_video_transform, "stop() failed");

			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

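			/* Release the blitter that was set by the subclass' start() function */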
			if (blitter_video_transform->blitter != NULL)
			{
				gst_object_unref(GST_OBJECT(blitter_video_transform->blitter));
				blitter_video_transform->blitter = NULL;
			}

			break;
		}

		default:
			break;
	}

	return ret;
}
static gboolean gst_imx_blitter_video_transform_decide_allocation(GstBaseTransform *transform, GstQuery *query)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstCaps *outcaps;
	GstBufferPool *pool = NULL;
	guint size, min = 0, max = 0;
	GstStructure *config;
	GstVideoInfo vinfo;
	gboolean update_pool;

	g_assert(blitter_video_transform->blitter != NULL);

	gst_query_parse_allocation(query, &outcaps, NULL);
	gst_video_info_init(&vinfo);
	gst_video_info_from_caps(&vinfo, outcaps);

	GST_DEBUG_OBJECT(blitter_video_transform, "num allocation pools: %d", gst_query_get_n_allocation_pools(query));

	/* Look for a buffer pool which can allocate physical memory buffers */
	if (gst_query_get_n_allocation_pools(query) > 0)
	{
		for (guint i = 0; i < gst_query_get_n_allocation_pools(query); ++i)
		{
			gst_query_parse_nth_allocation_pool(query, i, &pool, &size, &min, &max);
			if (gst_buffer_pool_has_option(pool, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM))
				break;
		}

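		/* Make sure the buffer size is large enough for one full video frame */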
		size = MAX(size, vinfo.size);
		update_pool = TRUE;
	}
	else
	{
		pool = NULL;
		size = vinfo.size;
		min = max = 0;
		update_pool = FALSE;
	}

	/* Either no pool or no pool with the ability to allocate physical memory buffers
	 * has been found -> create a new pool */
	if ((pool == NULL) || !gst_buffer_pool_has_option(pool, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM))
	{
		if (pool == NULL)
			GST_DEBUG_OBJECT(blitter_video_transform, "no pool present; creating new pool");
		else
			GST_DEBUG_OBJECT(blitter_video_transform, "no pool supports physical memory buffers; creating new pool");
		pool = gst_imx_base_blitter_create_bufferpool(blitter_video_transform->blitter, outcaps, size, min, max, NULL, NULL);
	}
	else
	{
		config = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(config, outcaps, size, min, max);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
		gst_buffer_pool_set_config(pool, config);
	}

	GST_DEBUG_OBJECT(
		blitter_video_transform,
		"pool config:  outcaps: %" GST_PTR_FORMAT "  size: %u  min buffers: %u  max buffers: %u",
		(gpointer)outcaps,
		size,
		min,
		max
	);

	if (update_pool)
		gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
	else
		gst_query_add_allocation_pool(query, pool, size, min, max);

	if (pool != NULL)
		gst_object_unref(pool);

	return TRUE;
}
static GstStateChangeReturn gst_imx_blitter_video_transform_change_state(GstElement *element, GstStateChange transition)
{
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(element);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(element));
	GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

	g_assert(blitter_video_transform != NULL);

	switch (transition)
	{
		case GST_STATE_CHANGE_NULL_TO_READY:
		{
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

			blitter_video_transform->initialized = TRUE;

			if ((klass->start != NULL) && !(klass->start(blitter_video_transform)))
			{
				GST_ERROR_OBJECT(blitter_video_transform, "start() failed");
				blitter_video_transform->initialized = FALSE;
				GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);
				return GST_STATE_CHANGE_FAILURE;
			}

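			/* Acquire the blitter; if this fails, the element cannot process
			 * frames, so the state change fails as well */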
			if (!gst_imx_blitter_video_transform_acquire_blitter(blitter_video_transform))
			{
				GST_ERROR_OBJECT(blitter_video_transform, "acquiring blitter failed");
				GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);
				return GST_STATE_CHANGE_FAILURE;
			}

			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

			break;
		}

		default:
			break;
	}

	ret = GST_ELEMENT_CLASS(gst_imx_blitter_video_transform_parent_class)->change_state(element, transition);
	if (ret == GST_STATE_CHANGE_FAILURE)
		return ret;

	switch (transition)
	{
		case GST_STATE_CHANGE_PAUSED_TO_READY:
		{
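			/* Forget any previously seen crop metadata so the canvas is
			 * recalculated once frames with crop metadata arrive again */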
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);
			blitter_video_transform->last_frame_with_cropdata = FALSE;
			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);
			break;
		}

		case GST_STATE_CHANGE_READY_TO_NULL:
		{
			GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

			blitter_video_transform->initialized = FALSE;

			if ((klass->stop != NULL) && !(klass->stop(blitter_video_transform)))
				GST_ERROR_OBJECT(blitter_video_transform, "stop() failed");

			if (blitter_video_transform->blitter != NULL)
			{
				gst_object_unref(GST_OBJECT(blitter_video_transform->blitter));
				blitter_video_transform->blitter = NULL;
			}

			GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

			break;
		}

		default:
			break;
	}

	return ret;
}
static GstFlowReturn gst_imx_blitter_video_transform_prepare_output_buffer(GstBaseTransform *transform, GstBuffer *input, GstBuffer **outbuf)
{
	gboolean passthrough;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(transform));
	GstVideoCropMeta *video_crop_meta;
	gboolean update_canvas = FALSE;

	/* If there is no input buffer, or the input and output video infos are
	 * not equal, passthrough mode is not possible */
	passthrough = (input != NULL) && blitter_video_transform->inout_info_equal;

	GST_IMX_BLITTER_VIDEO_TRANSFORM_LOCK(blitter_video_transform);

	/* Check if cropping needs to be done */
	if ((input != NULL) && blitter_video_transform->input_crop && ((video_crop_meta = gst_buffer_get_video_crop_meta(input)) != NULL))
	{
		GstImxRegion source_region;
		gint in_width, in_height;

		source_region.x1 = video_crop_meta->x;
		source_region.y1 = video_crop_meta->y;
		source_region.x2 = video_crop_meta->x + video_crop_meta->width;
		source_region.y2 = video_crop_meta->y + video_crop_meta->height;

		in_width = GST_VIDEO_INFO_WIDTH(&(blitter_video_transform->input_video_info));
		in_height = GST_VIDEO_INFO_HEIGHT(&(blitter_video_transform->input_video_info));

		/* Make sure the source region does not exceed valid bounds */
		source_region.x1 = MAX(0, source_region.x1);
		source_region.y1 = MAX(0, source_region.y1);
		source_region.x2 = MIN(in_width, source_region.x2);
		source_region.y2 = MIN(in_height, source_region.y2);

		/* If the crop rectangle encompasses the entire frame, cropping is
		 * effectively a no-op, so keep passthrough enabled in that case
		 * (unless it has already been disabled) */
		passthrough = passthrough && (source_region.x1 == 0) && (source_region.y1 == 0) && (source_region.x2 == in_width) && (source_region.y2 == in_height);

		GST_LOG_OBJECT(blitter_video_transform, "retrieved crop rectangle %" GST_IMX_REGION_FORMAT, GST_IMX_REGION_ARGS(&source_region));

		/* Canvas needs to be updated if either one of these applies:
		 * - the current frame has crop metadata, the last one didn't
		 * - the new crop rectangle and the last are different */
		if (!(blitter_video_transform->last_frame_with_cropdata) || !gst_imx_region_equal(&source_region, &(blitter_video_transform->last_source_region)))
		{
			GST_LOG_OBJECT(blitter_video_transform, "using new crop rectangle %" GST_IMX_REGION_FORMAT, GST_IMX_REGION_ARGS(&source_region));
			blitter_video_transform->last_source_region = source_region;
			update_canvas = TRUE;
		}

		blitter_video_transform->last_frame_with_cropdata = TRUE;
	}
	else
	{
		/* Force a canvas update if this frame has no crop metadata but the last one did */
		if (blitter_video_transform->last_frame_with_cropdata)
			update_canvas = TRUE;
		blitter_video_transform->last_frame_with_cropdata = FALSE;
	}

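	/* Reconfigure the blitter: clip the (possibly cropped) source region
	 * against the canvas and apply the resulting input region and canvas */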
	if (update_canvas)
	{
		GstImxRegion source_subset;
		GstImxCanvas *canvas = &(blitter_video_transform->canvas);

		gst_imx_canvas_clip(
			canvas,
			&(canvas->outer_region),
			&(blitter_video_transform->input_video_info),
			blitter_video_transform->last_frame_with_cropdata ? &(blitter_video_transform->last_source_region) : NULL,
			&source_subset
		);

		gst_imx_blitter_set_input_region(blitter_video_transform->blitter, &source_subset);
		gst_imx_blitter_set_output_canvas(blitter_video_transform->blitter, canvas);
	}

	if ((input != NULL) && passthrough)
	{
		/* Test for additional special cases where passthrough must not be
		 * enabled; such cases are transforms like rotation, deinterlacing ... */
		passthrough = passthrough && (blitter_video_transform->canvas.inner_rotation == GST_IMX_CANVAS_INNER_ROTATION_NONE) &&
		              (klass->are_transforms_necessary != NULL) &&
		              !(klass->are_transforms_necessary(blitter_video_transform, input));
	}
	else if (!blitter_video_transform->inout_info_equal)
		GST_LOG_OBJECT(transform, "input and output caps are not equal");
	else if (blitter_video_transform->last_frame_with_cropdata && !passthrough)
		GST_LOG_OBJECT(transform, "cropping is performed");
	else if (input == NULL)
		GST_LOG_OBJECT(transform, "input buffer is NULL");

	GST_IMX_BLITTER_VIDEO_TRANSFORM_UNLOCK(blitter_video_transform);

	GST_LOG_OBJECT(transform, "passthrough: %s", passthrough ? "yes" : "no");

	if (passthrough)
	{
		/* This instructs the base class to not allocate a new buffer for
		 * the output, and to instead pass the input buffer as the output
		 * (this is used in the transform_frame function) */
		*outbuf = input;
		return GST_FLOW_OK;
	}
	else
		return GST_BASE_TRANSFORM_CLASS(gst_imx_blitter_video_transform_parent_class)->prepare_output_buffer(transform, input, outbuf);
}