Code Example #1
File: mlt_transition.c  Project: jjcare/mlt
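This helper prepares a transition's B frame before its image is fetched. It pops the transition and the A frame off the B frame's stacks, inherits rescale.interp from the A frame (treating a missing or "none" value as "nearest"), substitutes the profile's sample aspect ratio when the B frame reports none, copies the deinterlacing and field-order hints across, and finally delegates to mlt_frame_get_image.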
static int get_image_b( mlt_frame b_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	mlt_transition self = mlt_frame_pop_service( b_frame );
	mlt_frame a_frame = mlt_frame_pop_frame( b_frame );
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );
	mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

	// Set scaling from A frame if not already provided.
	if ( !mlt_properties_get( b_props, "rescale.interp" ) )
	{
		const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
		if ( !rescale || !strcmp( rescale, "none" ) )
			rescale = "nearest";
		mlt_properties_set( b_props, "rescale.interp", rescale );
	}

	// Ensure sane aspect ratio
	if ( mlt_frame_get_aspect_ratio( b_frame ) == 0.0 )
		mlt_frame_set_aspect_ratio( b_frame, mlt_profile_sar( mlt_service_profile( MLT_TRANSITION_SERVICE(self) ) ) );

	mlt_properties_pass_list( b_props, a_props,
		"consumer_deinterlace, deinterlace_method, consumer_tff" );

	return mlt_frame_get_image( b_frame, image, format, width, height, writable );
}
Code Example #2
File: mlt_tractor.c  Project: rt1729/mlt
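The tractor's frame wrapper copies consumer rendering hints (rescale.interp, resize_alpha, distort, deinterlacing, field order) down to the frame it wraps, renders that frame with mlt_frame_get_image, and then mirrors the resulting image, alpha channel, and metadata (dimensions, format, aspect ratio, colorspace, and so on) back onto its own frame.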
static int producer_get_image( mlt_frame self, uint8_t **buffer, mlt_image_format *format, int *width, int *height, int writable )
{
	uint8_t *data = NULL;
	int size = 0;
	mlt_properties properties = MLT_FRAME_PROPERTIES( self );
	mlt_frame frame = mlt_frame_pop_service( self );
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
	// Pass consumer scaling and deinterlacing hints down to the wrapped frame
	mlt_properties_set( frame_properties, "rescale.interp", mlt_properties_get( properties, "rescale.interp" ) );
	mlt_properties_set_int( frame_properties, "resize_alpha", mlt_properties_get_int( properties, "resize_alpha" ) );
	mlt_properties_set_int( frame_properties, "distort", mlt_properties_get_int( properties, "distort" ) );
	mlt_properties_set_int( frame_properties, "consumer_deinterlace", mlt_properties_get_int( properties, "consumer_deinterlace" ) );
	mlt_properties_set( frame_properties, "deinterlace_method", mlt_properties_get( properties, "deinterlace_method" ) );
	mlt_properties_set_int( frame_properties, "consumer_tff", mlt_properties_get_int( properties, "consumer_tff" ) );
	// Render the wrapped frame, then adopt its image and metadata on this frame
	mlt_frame_get_image( frame, buffer, format, width, height, writable );
	mlt_frame_set_image( self, *buffer, 0, NULL );
	mlt_properties_set_int( properties, "width", *width );
	mlt_properties_set_int( properties, "height", *height );
	mlt_properties_set_int( properties, "format", *format );
	mlt_properties_set_double( properties, "aspect_ratio", mlt_frame_get_aspect_ratio( frame ) );
	mlt_properties_set_int( properties, "progressive", mlt_properties_get_int( frame_properties, "progressive" ) );
	mlt_properties_set_int( properties, "distort", mlt_properties_get_int( frame_properties, "distort" ) );
	mlt_properties_set_int( properties, "colorspace", mlt_properties_get_int( frame_properties, "colorspace" ) );
	mlt_properties_set_int( properties, "force_full_luma", mlt_properties_get_int( frame_properties, "force_full_luma" ) );
	mlt_properties_set_int( properties, "top_field_first", mlt_properties_get_int( frame_properties, "top_field_first" ) );
	mlt_properties_set_data( properties, "movit.convert.fence",
		mlt_properties_get_data( frame_properties, "movit.convert.fence", NULL ),
		0, NULL, NULL );
	// Carry the alpha channel across as well
	data = mlt_frame_get_alpha_mask( frame );
	mlt_properties_get_data( frame_properties, "alpha", &size );
	mlt_frame_set_alpha( self, data, size, NULL );
	self->convert_image = frame->convert_image;
	self->convert_audio = frame->convert_audio;
	return 0;
}
Code Example #3
File: filter_resize.c  Project: aib/mlt
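A typical filter_process implementation: it stores the source aspect ratio on the frame's image stack, pushes the filter onto the service stack, and registers filter_get_image so that the real work happens later, when the consumer calls mlt_frame_get_image. A minimal sketch of that deferred callback follows the code.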
static mlt_frame filter_process( mlt_filter filter, mlt_frame frame )
{
	// Store the aspect ratio reported by the source
	mlt_deque_push_back_double( MLT_FRAME_IMAGE_STACK( frame ), mlt_frame_get_aspect_ratio( frame ) );

	// Push this on to the service stack
	mlt_frame_push_service( frame, filter );

	// Push the get_image method on to the stack
	mlt_frame_push_get_image( frame, filter_get_image );

	return frame;
}
Code Example #4
File: mlt_transition.c  Project: jjcare/mlt
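The A-frame counterpart of Example #1: it forces a usable rescale.interp value and a sane aspect ratio on the A frame before fetching its image.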
static int get_image_a( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	mlt_transition self = mlt_frame_pop_service( a_frame );
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

	// All transitions get scaling
	const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
	if ( !rescale || !strcmp( rescale, "none" ) )
		mlt_properties_set( a_props, "rescale.interp", "nearest" );

	// Ensure sane aspect ratio
	if ( mlt_frame_get_aspect_ratio( a_frame ) == 0.0 )
		mlt_frame_set_aspect_ratio( a_frame, mlt_profile_sar( mlt_service_profile( MLT_TRANSITION_SERVICE(self) ) ) );

	return mlt_frame_get_image( a_frame, image, format, width, height, writable );
}
Code Example #5
File: consumer_sdl2.c  Project: jliljebl/mlt
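An SDL2 consumer's video callback: it requests a yuv422 image, computes the destination rectangle from the window size, display aspect ratio, and rescale setting, uploads the planes with SDL_UpdateYUVTexture (or SDL_UpdateTexture for packed data), presents the renderer, and fires the consumer-frame-show event. When display is disabled it may still fetch a preview-format image so listeners of that event can use it.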
static int consumer_play_video( consumer_sdl self, mlt_frame frame )
{
	// Get the properties of this consumer
	mlt_properties properties = self->properties;

//	mlt_image_format vfmt = mlt_properties_get_int( properties, "mlt_image_format" );
	mlt_image_format vfmt = mlt_image_yuv422;
	int width = self->width, height = self->height;
	uint8_t *image;

	int video_off = mlt_properties_get_int( properties, "video_off" );
	int preview_off = mlt_properties_get_int( properties, "preview_off" );
	int display_off = video_off | preview_off;

	if ( self->running && !display_off )
	{
		// Get the image, width and height
		mlt_frame_get_image( frame, &image, &vfmt, &width, &height, 0 );

		if ( self->running )
		{
			// Determine window's new display aspect ratio
			int x = mlt_properties_get_int( properties, "window_width" );
			if ( x && x != self->window_width )
				self->window_width = x;
			x = mlt_properties_get_int( properties, "window_height" );
			if ( x && x != self->window_height )
				self->window_height = x;
			double this_aspect = ( double )self->window_width / self->window_height;

			// Get the display aspect ratio
			double display_ratio = mlt_properties_get_double( properties, "display_ratio" );

			// Determine frame's display aspect ratio
			double frame_aspect = mlt_frame_get_aspect_ratio( frame ) * width / height;

			// Store the width and height received
			self->width = width;
			self->height = height;

			// If using hardware scaler
			if ( mlt_properties_get( properties, "rescale" ) != NULL &&
				!strcmp( mlt_properties_get( properties, "rescale" ), "none" ) )
			{
				// Use hardware scaler to normalise display aspect ratio
				self->sdl_rect.w = frame_aspect / this_aspect * self->window_width;
				self->sdl_rect.h = self->window_height;
				if ( self->sdl_rect.w > self->window_width )
				{
					self->sdl_rect.w = self->window_width;
					self->sdl_rect.h = this_aspect / frame_aspect * self->window_height;
				}
			}
			// Special case optimisation to negate odd effect of sample aspect ratio
			// not corresponding exactly with image resolution.
			else if ( (int)( this_aspect * 1000 ) == (int)( display_ratio * 1000 ) ) 
			{
				self->sdl_rect.w = self->window_width;
				self->sdl_rect.h = self->window_height;
			}
			// Use hardware scaler to normalise sample aspect ratio
			else if ( self->window_height * display_ratio > self->window_width )
			{
				self->sdl_rect.w = self->window_width;
				self->sdl_rect.h = self->window_width / display_ratio;
			}
			else
			{
				self->sdl_rect.w = self->window_height * display_ratio;
				self->sdl_rect.h = self->window_height;
			}
			
			self->sdl_rect.x = ( self->window_width - self->sdl_rect.w ) / 2;
			self->sdl_rect.y = ( self->window_height - self->sdl_rect.h ) / 2;
			self->sdl_rect.x -= self->sdl_rect.x % 2;

			mlt_properties_set_int( self->properties, "rect_x", self->sdl_rect.x );
			mlt_properties_set_int( self->properties, "rect_y", self->sdl_rect.y );
			mlt_properties_set_int( self->properties, "rect_w", self->sdl_rect.w );
			mlt_properties_set_int( self->properties, "rect_h", self->sdl_rect.h );
		}

		if ( self->running && image )
		{
			unsigned char* planes[4];
			int strides[4];

			mlt_image_format_planes( vfmt, width, height, image, planes, strides );
			if ( strides[1] ) {
				SDL_UpdateYUVTexture( self->sdl_texture, NULL,
					planes[0], strides[0],
					planes[1], strides[1],
					planes[2], strides[2] );
			} else {
				SDL_UpdateTexture( self->sdl_texture, NULL, planes[0], strides[0] );
			}
			SDL_RenderClear( self->sdl_renderer );
			SDL_RenderCopy( self->sdl_renderer, self->sdl_texture, NULL, &self->sdl_rect );
			SDL_RenderPresent( self->sdl_renderer );
		}

		mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
	}
	else if ( self->running )
	{
		if ( !video_off ) {
			mlt_image_format preview_format = mlt_properties_get_int( properties, "preview_format" );
			vfmt = preview_format == mlt_image_none ? mlt_image_rgb24a : preview_format;
			mlt_frame_get_image( frame, &image, &vfmt, &width, &height, 0 );
		}
		mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
	}

	return 0;
}
Code Example #6
File: filter_watermark.c  Project: Enlik/mlt
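The watermark filter lazily creates a composite transition and a producer for the watermark resource, passes any composite.-prefixed and producer.-prefixed filter properties down to them, seeks the producer to the filter's position, and composites the resulting B frame over the filtered frame. In "reverse" mode the roles are swapped: the frame is composited onto the watermark and the B frame's image and alpha are returned instead.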
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	// Error we will return
	int error = 0;

	// Get the watermark filter object
	mlt_filter this = mlt_frame_pop_service( frame );

	// Get the properties of the filter
	mlt_properties properties = MLT_FILTER_PROPERTIES( this );

	mlt_service_lock( MLT_FILTER_SERVICE( this ) );

	// Get the producer from the filter
	mlt_producer producer = mlt_properties_get_data( properties, "producer", NULL );

	// Get the composite from the filter
	mlt_transition composite = mlt_properties_get_data( properties, "composite", NULL );

	// Get the resource to use
	char *resource = mlt_properties_get( properties, "resource" );

	// Get the old resource
	char *old_resource = mlt_properties_get( properties, "_old_resource" );

	// Create a composite if we don't have one
	if ( composite == NULL )
	{
		// Create composite via the factory
		mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( this ) );
		composite = mlt_factory_transition( profile, "composite", NULL );

		// Register the composite for reuse/destruction
		if ( composite != NULL )
			mlt_properties_set_data( properties, "composite", composite, 0, ( mlt_destructor )mlt_transition_close, NULL );
	}

	// If we have one
	if ( composite != NULL )
	{
		// Get the properties
		mlt_properties composite_properties = MLT_TRANSITION_PROPERTIES( composite );

	// Pass all composite.-prefixed properties on the filter down to the transition
		mlt_properties_pass( composite_properties, properties, "composite." );

		if ( mlt_properties_get( properties, "composite.out" ) == NULL )
			mlt_properties_set_int( composite_properties, "out", mlt_properties_get_int( properties, "_out" ) );

		// Force a refresh
		mlt_properties_set_int( composite_properties, "refresh", 1 );
	}

	// Create a producer if we don't have one
	if ( producer == NULL || ( old_resource != NULL && strcmp( resource, old_resource ) ) )
	{
		// Get the factory producer service
		char *factory = mlt_properties_get( properties, "factory" );

		// Create the producer
		mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( this ) );
		producer = mlt_factory_producer( profile, factory, resource );

		// If we have one
		if ( producer != NULL )
		{
			// Register the producer for reuse/destruction
			mlt_properties_set_data( properties, "producer", producer, 0, ( mlt_destructor )mlt_producer_close, NULL );

			// Ensure that we loop
			mlt_properties_set( MLT_PRODUCER_PROPERTIES( producer ), "eof", "loop" );

			// Set the old resource
			mlt_properties_set( properties, "_old_resource", resource );
		}
	}

	if ( producer != NULL )
	{
		// Get the producer properties
		mlt_properties producer_properties = MLT_PRODUCER_PROPERTIES( producer );

		// Now pass all producer.-prefixed properties on the filter down to the producer
		mlt_properties_pass( producer_properties, properties, "producer." );
	}

	mlt_service_unlock( MLT_FILTER_SERVICE( this ) );

	// Only continue if we have both producer and composite
	if ( composite != NULL && producer != NULL )
	{
		// Get the service of the producer
		mlt_service service = MLT_PRODUCER_SERVICE( producer );

		// We will get the 'b frame' from the producer
		mlt_frame b_frame = NULL;

		// Get the original producer position
		mlt_position position = mlt_filter_get_position( this, frame );

		// Make sure the producer is in the correct position
		mlt_producer_seek( producer, position );

		// Resetting position to appease the composite transition
		mlt_frame_set_position( frame, position );

		// Get the b frame and process with composite if successful
		if ( mlt_service_get_frame( service, &b_frame, 0 ) == 0 )
		{
			// Get the a and b frame properties
			mlt_properties a_props = MLT_FRAME_PROPERTIES( frame );
			mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );
			mlt_profile profile = mlt_service_profile( service );

			// Set the b frame to be in the same position and have same consumer requirements
			mlt_frame_set_position( b_frame, position );
			mlt_properties_set_int( b_props, "consumer_deinterlace", mlt_properties_get_int( a_props, "consumer_deinterlace" ) || mlt_properties_get_int( properties, "deinterlace" ) );

			// Check for the special case - no aspect ratio means no problem :-)
			if ( mlt_frame_get_aspect_ratio( b_frame ) == 0 )
				mlt_frame_set_aspect_ratio( b_frame, mlt_profile_sar( profile ) );
			if ( mlt_frame_get_aspect_ratio( frame ) == 0 )
				mlt_frame_set_aspect_ratio( frame, mlt_profile_sar( profile ) );

			if ( mlt_properties_get_int( properties, "distort" ) )
			{
				mlt_properties_set_int( MLT_TRANSITION_PROPERTIES( composite ), "distort", 1 );
				mlt_properties_set_int( a_props, "distort", 1 );
				mlt_properties_set_int( b_props, "distort", 1 );
			}

			*format = mlt_image_yuv422;
			if ( mlt_properties_get_int( properties, "reverse" ) == 0 )
			{
				// Apply all filters that are attached to this filter to the b frame
				mlt_service_apply_filters( MLT_FILTER_SERVICE( this ), b_frame, 0 );

				// Process the frame
				mlt_transition_process( composite, frame, b_frame );

				// Get the image
				error = mlt_frame_get_image( frame, image, format, width, height, 1 );
			}
			else
			{
				char temp[ 132 ];
				int count = 0;
				uint8_t *alpha = NULL;
				const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
				if ( rescale == NULL || !strcmp( rescale, "none" ) )
					rescale = "hyper";
				mlt_transition_process( composite, b_frame, frame );
				mlt_properties_set_int( a_props, "consumer_deinterlace", 1 );
				mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );
				mlt_properties_set( a_props, "rescale.interp", rescale );
				mlt_properties_set( b_props, "rescale.interp", rescale );
				mlt_service_apply_filters( MLT_FILTER_SERVICE( this ), b_frame, 0 );
				error = mlt_frame_get_image( b_frame, image, format, width, height, 1 );
				alpha = mlt_frame_get_alpha_mask( b_frame );
				mlt_frame_set_image( frame, *image, *width * *height * 2, NULL );
				mlt_frame_set_alpha( frame, alpha, *width * *height, NULL );
				mlt_properties_set_int( a_props, "width", *width );
				mlt_properties_set_int( a_props, "height", *height );
				mlt_properties_set_int( a_props, "progressive", 1 );
				mlt_properties_inc_ref( b_props );
				strcpy( temp, "_b_frame" );
				while( mlt_properties_get_data( a_props, temp, NULL ) != NULL )
					sprintf( temp, "_b_frame%d", count ++ );
				mlt_properties_set_data( a_props, temp, b_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
			}
		}

		// Close the b frame
		mlt_frame_close( b_frame );
	}
	else
	{
		// Get the image from the frame without running fx
		error = mlt_frame_get_image( frame, image, format, width, height, 1 );
	}

	return error;
}
Code Example #7
File: filter_crop.c  Project: Enlik/mlt
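The crop filter's filter_process: when the filter is active it defers the actual cropping to filter_get_image; otherwise it only computes the crop.left/right/top/bottom metadata, optionally scaling the values from the profile resolution or centering the crop to match the profile's display aspect ratio, and adjusts meta.media.width/height accordingly.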
static mlt_frame filter_process( mlt_filter filter, mlt_frame frame )
{
	if ( mlt_properties_get_int( MLT_FILTER_PROPERTIES( filter ), "active" ) )
	{
		// Push the get_image method on to the stack
		mlt_frame_push_service( frame, mlt_service_profile( MLT_FILTER_SERVICE( filter ) ) );
		mlt_frame_push_get_image( frame, filter_get_image );
	}
	else
	{
		mlt_properties filter_props = MLT_FILTER_PROPERTIES( filter );
		mlt_properties frame_props = MLT_FRAME_PROPERTIES( frame );
		int left   = mlt_properties_get_int( filter_props, "left" );
		int right  = mlt_properties_get_int( filter_props, "right" );
		int top    = mlt_properties_get_int( filter_props, "top" );
		int bottom = mlt_properties_get_int( filter_props, "bottom" );
		int width  = mlt_properties_get_int( frame_props, "meta.media.width" );
		int height = mlt_properties_get_int( frame_props, "meta.media.height" );
		int use_profile = mlt_properties_get_int( filter_props, "use_profile" );
		mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );

		if ( use_profile )
		{
			top = top * height / profile->height;
			bottom = bottom * height / profile->height;
			left = left * width / profile->width;
			right = right * width / profile->width;
		}
		if ( mlt_properties_get_int( filter_props, "center" ) )
		{
			double aspect_ratio = mlt_frame_get_aspect_ratio( frame );
			if ( aspect_ratio == 0.0 )
				aspect_ratio = mlt_profile_sar( profile );
			double input_ar = aspect_ratio * width / height;
			double output_ar = mlt_profile_dar( mlt_service_profile( MLT_FILTER_SERVICE(filter) ) );
			int bias = mlt_properties_get_int( filter_props, "center_bias" );
			
			if ( input_ar > output_ar )
			{
				left = right = ( width - rint( output_ar * height / aspect_ratio ) ) / 2;
				if ( abs(bias) > left )
					bias = bias < 0 ? -left : left;
				else if ( use_profile )
					bias = bias * width / profile->width;
				left -= bias;
				right += bias;
			}
			else
			{
				top = bottom = ( height - rint( aspect_ratio * width / output_ar ) ) / 2;
				if ( abs(bias) > top )
					bias = bias < 0 ? -top : top;
				else if ( use_profile )
					bias = bias * height / profile->height;
				top -= bias;
				bottom += bias;
			}
		}		

		// Coerce the output to an even width because subsampled YUV with odd widths is too
		// risky for downstream processing to handle correctly.
		left += ( width - left - right ) & 1;
		if ( width - left - right < 8 )
			left = right = 0;
		if ( height - top - bottom < 8 )
			top = bottom = 0;
		mlt_properties_set_int( frame_props, "crop.left", left );
		mlt_properties_set_int( frame_props, "crop.right", right );
		mlt_properties_set_int( frame_props, "crop.top", top );
		mlt_properties_set_int( frame_props, "crop.bottom", bottom );
		mlt_properties_set_int( frame_props, "crop.original_width", width );
		mlt_properties_set_int( frame_props, "crop.original_height", height );
		mlt_properties_set_int( frame_props, "meta.media.width", width - left - right );
		mlt_properties_set_int( frame_props, "meta.media.height", height - top - bottom );
	}
	return frame;
}
Code Example #8
File: mlt_frame.c  Project: agpanarin/mlt
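generate_test_image supplies a substitute image when a frame has none: it either pulls a frame from the registered test-card producer and converts it to the originally requested format, or allocates a plain white test image in the requested format, defaulting to 720x576 when no size was requested.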
static int generate_test_image( mlt_properties properties, uint8_t **buffer,  mlt_image_format *format, int *width, int *height, int writable )
{
	mlt_producer producer = mlt_properties_get_data( properties, "test_card_producer", NULL );
	mlt_image_format requested_format = *format;
	int error = 1;

	if ( producer )
	{
		mlt_frame test_frame = NULL;
		mlt_service_get_frame( MLT_PRODUCER_SERVICE( producer ), &test_frame, 0 );
		if ( test_frame )
		{
			mlt_properties test_properties = MLT_FRAME_PROPERTIES( test_frame );
			mlt_properties_set_data( properties, "test_card_frame", test_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
			mlt_properties_set( test_properties, "rescale.interp", mlt_properties_get( properties, "rescale.interp" ) );
			error = mlt_frame_get_image( test_frame, buffer, format, width, height, writable );
			if ( !error && buffer && *buffer )
			{
				mlt_properties_set_double( properties, "aspect_ratio", mlt_frame_get_aspect_ratio( test_frame ) );
				mlt_properties_set_int( properties, "width", *width );
				mlt_properties_set_int( properties, "height", *height );
				if ( test_frame->convert_image && requested_format != mlt_image_none )
					test_frame->convert_image( test_frame, buffer, format, requested_format );
				mlt_properties_set_int( properties, "format", *format );
			}
		}
		else
		{
			mlt_properties_set_data( properties, "test_card_producer", NULL, 0, NULL, NULL );
		}
	}
	if ( error && buffer && *format != mlt_image_none )
	{
		int size = 0;

		*width = *width == 0 ? 720 : *width;
		*height = *height == 0 ? 576 : *height;
		size = *width * *height;

		mlt_properties_set_int( properties, "format", *format );
		mlt_properties_set_int( properties, "width", *width );
		mlt_properties_set_int( properties, "height", *height );
		mlt_properties_set_double( properties, "aspect_ratio", 1.0 );

		switch( *format )
		{
			case mlt_image_rgb24:
				size *= 3;
				size += *width * 3;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
					memset( *buffer, 255, size );
				break;
			case mlt_image_rgb24a:
			case mlt_image_opengl:
				size *= 4;
				size += *width * 4;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
					memset( *buffer, 255, size );
				break;
			case mlt_image_yuv422:
				size *= 2;
				size += *width * 2;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
				{
					register uint8_t *p = *buffer;
					register uint8_t *q = p + size;
					while ( p != NULL && p != q )
					{
						*p ++ = 235;
						*p ++ = 128;
					}
				}
				break;
			case mlt_image_yuv420p:
				*buffer = mlt_pool_alloc( size * 3 / 2 );
				if ( *buffer )
				{
					memset( *buffer, 235, size );
					memset( *buffer + size, 128, size / 2 );
				}
				break;
			default:
				size = 0;
				break;
		}
		mlt_properties_set_data( properties, "image", *buffer, size, ( mlt_destructor )mlt_pool_release, NULL );
		mlt_properties_set_int( properties, "test_image", 1 );
		error = 0;
	}
	return error;
}
Code Example #9
File: transition_affine.c  Project: mltframework/mlt
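The current affine transition: it computes a target rectangle either from the legacy "geometry" property or from an animated "rect", optionally constrains it to the B frame's display aspect ratio when "fill" is off, requests the B frame image at the appropriate size, then applies the affine transform with a selectable interpolator, optionally sliced across multiple threads.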
static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	// Get the b frame from the stack
	mlt_frame b_frame = mlt_frame_pop_frame( a_frame );

	// Get the transition object
	mlt_transition transition = mlt_frame_pop_service( a_frame );

	// Get the properties of the transition
	mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );

	// Get the properties of the a frame
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

	// Get the properties of the b frame
	mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

	// Image, format, width, height and image for the b frame
	uint8_t *b_image = NULL;
	mlt_image_format b_format = mlt_image_rgb24a;
	int b_width = mlt_properties_get_int( b_props, "meta.media.width" );
	int b_height = mlt_properties_get_int( b_props, "meta.media.height" );
	double b_ar = mlt_frame_get_aspect_ratio( b_frame );
	double b_dar = b_ar * b_width / b_height;

	// Assign the current position
	mlt_position position =  mlt_transition_get_position( transition, a_frame );

	int mirror = mlt_properties_get_position( properties, "mirror" );
	int length = mlt_transition_get_length( transition );
	if ( mlt_properties_get_int( properties, "always_active" ) )
	{
		mlt_properties props = mlt_properties_get_data( b_props, "_producer", NULL );
		mlt_position in = mlt_properties_get_int( props, "in" );
		mlt_position out = mlt_properties_get_int( props, "out" );
		length = out - in + 1;
	}

	// Obtain the normalised width and height from the a_frame
	mlt_profile profile = mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) );
	int normalised_width = profile->width;
	int normalised_height = profile->height;

	double consumer_ar = mlt_profile_sar( profile );

	if ( mirror && position > length / 2 )
		position = abs( position - length );

	// Fetch the a frame image
	*format = mlt_image_rgb24a;
	int error = mlt_frame_get_image( a_frame, image, format, width, height, 1 );
	if (error || !*image)
		return error;

	// Calculate the region now
	mlt_rect result = {0, 0, normalised_width, normalised_height, 1.0};
	mlt_service_lock( MLT_TRANSITION_SERVICE( transition ) );

	if (mlt_properties_get(properties, "geometry"))
	{
		// Structures for geometry
		struct mlt_geometry_item_s geometry;
		composite_calculate( transition, &geometry, normalised_width, normalised_height, ( double )position );
		result.x = geometry.x;
		result.y = geometry.y;
		result.w = geometry.w;
		result.h = geometry.h;
		result.o = geometry.mix / 100.0f;
	}
	else if (mlt_properties_get(properties, "rect"))
	{
		// Determine length and obtain cycle
		double cycle = mlt_properties_get_double( properties, "cycle" );
	
		// Allow a repeat cycle
		if ( cycle >= 1 )
			length = cycle;
		else if ( cycle > 0 )
			length *= cycle;
		
		mlt_position anim_pos = repeat_position(properties, "rect", position, length);
		result = mlt_properties_anim_get_rect(properties, "rect", anim_pos, length);
		if (mlt_properties_get(properties, "rect") && strchr(mlt_properties_get(properties, "rect"), '%')) {
			result.x *= normalised_width;
			result.y *= normalised_height;
			result.w *= normalised_width;
			result.h *= normalised_height;
		}
		result.o = (result.o == DBL_MIN)? 1.0 : MIN(result.o, 1.0);
	}
	mlt_service_unlock( MLT_TRANSITION_SERVICE( transition ) );

	double geometry_w = result.w;
	double geometry_h = result.h;

	if ( !mlt_properties_get_int( properties, "fill" ) )
	{
		double geometry_dar = result.w * consumer_ar / result.h;

		if ( b_dar > geometry_dar )
		{
			result.w = MIN( result.w, b_width * b_ar / consumer_ar );
			result.h = result.w * consumer_ar / b_dar;
		}
		else
		{
			result.h = MIN( result.h, b_height );
			result.w = result.h * b_dar / consumer_ar;
		}
	}

	// Fetch the b frame image
	result.w = ( result.w * *width / normalised_width );
	result.h = ( result.h * *height / normalised_height );
	result.x = ( result.x * *width / normalised_width );
	result.y = ( result.y * *height / normalised_height );

	if (mlt_properties_get_int(properties, "b_scaled")) {
		// Request only the b frame image size that is actually needed.
		b_width = result.w;
		b_height = result.h;
		// Set the rescale interpolation to match the frame
		mlt_properties_set( b_props, "rescale.interp", mlt_properties_get( a_props, "rescale.interp" ) );
	} else {
		// Request full resolution of b frame image.
		mlt_properties_set_int( b_props, "rescale_width", b_width );
		mlt_properties_set_int( b_props, "rescale_height", b_height );

		// Suppress padding and aspect normalization.
		mlt_properties_set( b_props, "rescale.interp", "none" );
	}

	// This is not a field-aware transform.
	mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );

	error = mlt_frame_get_image( b_frame, &b_image, &b_format, &b_width, &b_height, 0 );
	if (error || !b_image) {
		// Remove potentially large image on the B frame. 
		mlt_frame_set_image( b_frame, NULL, 0, NULL );
		return error;
	}

	// Check that both images are of the correct format and process
	if ( *format == mlt_image_rgb24a && b_format == mlt_image_rgb24a )
	{
		double sw, sh;
		// Get values from the transition
		double scale_x = mlt_properties_anim_get_double( properties, "scale_x", position, length );
		double scale_y = mlt_properties_anim_get_double( properties, "scale_y", position, length );
		int scale = mlt_properties_get_int( properties, "scale" );
		double geom_scale_x = (double) b_width / result.w;
		double geom_scale_y = (double) b_height / result.h;
		struct sliced_desc desc = {
			.a_image = *image,
			.b_image = b_image,
			.interp = interpBL_b32,
			.a_width = *width,
			.a_height = *height,
			.b_width = b_width,
			.b_height = b_height,
			.lower_x = -(result.x + result.w / 2.0), // center
			.lower_y = -(result.y + result.h / 2.0), // middle
			.mix = result.o,
			.x_offset = (double) b_width / 2.0,
			.y_offset = (double) b_height / 2.0,
			.b_alpha = mlt_properties_get_int( properties, "b_alpha" ),
			// Affine boundaries
			.minima = 0,
			.xmax = b_width - 1,
			.ymax = b_height - 1
		};

		// Recalculate vars if alignment supplied.
		if ( mlt_properties_get( properties, "halign" ) || mlt_properties_get( properties, "valign" ) )
		{
			double halign = alignment_parse( mlt_properties_get( properties, "halign" ) );
			double valign = alignment_parse( mlt_properties_get( properties, "valign" ) );
			desc.x_offset = halign * b_width / 2.0;
			desc.y_offset = valign * b_height / 2.0;
			desc.lower_x = -(result.x + geometry_w * halign / 2.0f);
			desc.lower_y = -(result.y + geometry_h * valign / 2.0f);
		}

		affine_init( desc.affine.matrix );

		// Compute the affine transform
		get_affine( &desc.affine, transition, ( double )position, length );
		desc.dz = MapZ( desc.affine.matrix, 0, 0 );
		if ( (int) fabs( desc.dz * 1000 ) < 25 )
			return 0;

		// Factor scaling into the transformation based on output resolution.
		if ( mlt_properties_get_int( properties, "distort" ) )
		{
			scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
			scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
		}
		else
		{
			// Determine scale with respect to aspect ratio.
			double consumer_dar = consumer_ar * normalised_width / normalised_height;
			
			if ( b_dar > consumer_dar )
			{
				scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
				scale_y = geom_scale_x * ( scale_y == 0 ? 1 : scale_y );
				scale_y *= b_ar / consumer_ar;
			}
			else
			{
				scale_x = geom_scale_y * ( scale_x == 0 ? 1 : scale_x );
				scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
				scale_x *= consumer_ar / b_ar;
			}
		}
		if ( scale )
		{
			affine_max_output( desc.affine.matrix, &sw, &sh, desc.dz, *width, *height );
			affine_scale( desc.affine.matrix, sw * MIN( geom_scale_x, geom_scale_y ), sh * MIN( geom_scale_x, geom_scale_y ) );
		}
		else if ( scale_x != 0 && scale_y != 0 )
		{
			affine_scale( desc.affine.matrix, scale_x, scale_y );
		}


		char *interps = mlt_properties_get( a_props, "rescale.interp" );
		// Copy in case string is changed.
		if ( interps )
			interps = strdup( interps );

		// Set the interpolation function
		if ( interps == NULL || strcmp( interps, "nearest" ) == 0 || strcmp( interps, "neighbor" ) == 0 || strcmp( interps, "tiles" ) == 0 || strcmp( interps, "fast_bilinear" ) == 0 )
		{
			desc.interp = interpNN_b32;
			// uses lrintf. Values should be >= -0.5 and < max + 0.5
			desc.minima -= 0.5;
			desc.xmax += 0.49;
			desc.ymax += 0.49;
		}
		else if ( strcmp( interps, "bilinear" ) == 0 )
		{
			desc.interp = interpBL_b32;
			// uses floorf.
		}
		else if ( strcmp( interps, "bicubic" ) == 0 ||  strcmp( interps, "hyper" ) == 0 || strcmp( interps, "sinc" ) == 0 || strcmp( interps, "lanczos" ) == 0 || strcmp( interps, "spline" ) == 0 )
		{
			// TODO: lanczos 8x8
			// TODO: spline 4x4 or 6x6
			desc.interp = interpBC_b32;
			// uses ceilf. Values should be > -1 and <= max.
			desc.minima -= 1;
		}
		free( interps );

		// Do the transform with interpolation
		int threads = mlt_properties_get_int(properties, "threads");
		threads = CLAMP(threads, 0, mlt_slices_count_normal());
		if (threads == 1)
			sliced_proc(0, 0, 1, &desc);
		else
			mlt_slices_run_normal(threads, sliced_proc, &desc);
		
		// Remove potentially large image on the B frame. 
		mlt_frame_set_image( b_frame, NULL, 0, NULL );
	}

	return 0;
}
Code Example #10
File: filter_qtblend.c  Project: j-b-m/mlt
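The qtblend filter builds a background producer and a "qtblend" transition on demand, composites the filtered frame (as the B frame) over a frame fetched from that producer, and returns the composited image, keeping the background frame alive on the frame's properties until it is no longer needed.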
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	// Get the filter
	mlt_filter filter = mlt_frame_pop_service( frame );

	// Get the properties
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );

	// Get the image
	int error = 0;
	*format = mlt_image_rgb24a;

	// Only process if we have no error and a valid colour space
	if ( error == 0 )
	{
		mlt_service_lock( MLT_FILTER_SERVICE( filter ) );
		mlt_producer producer = mlt_properties_get_data( properties, "producer", NULL );
		mlt_transition transition = mlt_properties_get_data( properties, "transition", NULL );
		mlt_frame a_frame = NULL;
		mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );

		if ( producer == NULL )
		{
			char *background = mlt_properties_get( properties, "background" );
			producer = mlt_factory_producer( profile, NULL, background );
			mlt_properties_set_data( properties, "producer", producer, 0, (mlt_destructor)mlt_producer_close, NULL );
		}

		if ( transition == NULL )
		{
			transition = mlt_factory_transition( profile, "qtblend", NULL );
			mlt_properties_set_data( properties, "transition", transition, 0, (mlt_destructor)mlt_transition_close, NULL );
			if ( transition )
				mlt_properties_set_int( MLT_TRANSITION_PROPERTIES( transition ), "b_alpha", 1 );
		}

		if ( producer != NULL && transition != NULL )
		{
			mlt_position position = mlt_filter_get_position( filter, frame );
			mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
			mlt_position in = mlt_filter_get_in( filter );
			mlt_position out = mlt_filter_get_out( filter );
			double consumer_ar = mlt_profile_sar( profile );
			mlt_transition_set_in_and_out( transition, in, out );
			if ( out > 0 ) {
				mlt_properties_set_position( MLT_PRODUCER_PROPERTIES( producer ), "length", out - in + 1 );
				mlt_producer_set_in_and_out( producer, in, out );
			}
			mlt_producer_seek( producer, in + position );
			mlt_frame_set_position( frame, position );
			mlt_properties_pass( MLT_PRODUCER_PROPERTIES( producer ), properties, "producer." );
			mlt_properties_pass( MLT_TRANSITION_PROPERTIES( transition ), properties, "transition." );
			mlt_service_get_frame( MLT_PRODUCER_SERVICE( producer ), &a_frame, 0 );
			mlt_frame_set_position( a_frame, in + position );

			// Set the rescale interpolation to match the frame
			mlt_properties_set( MLT_FRAME_PROPERTIES( a_frame ), "rescale.interp", mlt_properties_get( frame_properties, "rescale.interp" ) );

			// Special case - aspect_ratio = 0
			if ( mlt_frame_get_aspect_ratio( frame ) == 0 )
				mlt_frame_set_aspect_ratio( frame, consumer_ar );
			if ( mlt_frame_get_aspect_ratio( a_frame ) == 0 )
				mlt_frame_set_aspect_ratio( a_frame, consumer_ar );

			// Add the qtblend transition onto the frame stack
			mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );
			mlt_transition_process( transition, a_frame, frame );

			if ( mlt_properties_get_int( properties, "use_normalised" ) )
			{
				// Use the normalised width & height
				mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );
				*width = profile->width;
				*height = profile->height;
			}
			
			mlt_frame_get_image( a_frame, image, format, width, height, writable );
			mlt_properties_set_data( frame_properties, "affine_frame", a_frame, 0, (mlt_destructor)mlt_frame_close, NULL );
			mlt_frame_set_image( frame, *image, *width * *height * 4, NULL );
			//mlt_frame_set_alpha( frame, mlt_frame_get_alpha_mask( a_frame ), *width * *height, NULL );
		}
		else
		{
			mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );
		}
	}

	return error;
}
Code Example #11
File: mlt_frame.c  Project: mcfrisk/mlt
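The core implementation of mlt_frame_get_image itself: it pops and invokes the next get_image callback on the frame's stack, converting the result to the requested format when a converter is attached; failing that, it falls back to a cached "image" property, then to a test-card producer, and finally generates a blank test image in the requested format.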
int mlt_frame_get_image( mlt_frame self, uint8_t **buffer, mlt_image_format *format, int *width, int *height, int writable )
{
	mlt_properties properties = MLT_FRAME_PROPERTIES( self );
	mlt_get_image get_image = mlt_frame_pop_get_image( self );
	mlt_producer producer = mlt_properties_get_data( properties, "test_card_producer", NULL );
	mlt_image_format requested_format = *format;
	int error = 0;

	if ( get_image )
	{
		mlt_properties_set_int( properties, "image_count", mlt_properties_get_int( properties, "image_count" ) - 1 );
		error = get_image( self, buffer, format, width, height, writable );
		if ( !error && *buffer )
		{
			mlt_properties_set_int( properties, "width", *width );
			mlt_properties_set_int( properties, "height", *height );
			if ( self->convert_image && *buffer && requested_format != mlt_image_none )
				self->convert_image( self, buffer, format, requested_format );
			mlt_properties_set_int( properties, "format", *format );
		}
		else
		{
			// Cause the image to be loaded from test card or fallback (white) below.
			mlt_frame_get_image( self, buffer, format, width, height, writable );
		}
	}
	else if ( mlt_properties_get_data( properties, "image", NULL ) )
	{
		*format = mlt_properties_get_int( properties, "format" );
		*buffer = mlt_properties_get_data( properties, "image", NULL );
		*width = mlt_properties_get_int( properties, "width" );
		*height = mlt_properties_get_int( properties, "height" );
		if ( self->convert_image && *buffer && requested_format != mlt_image_none )
		{
			self->convert_image( self, buffer, format, requested_format );
			mlt_properties_set_int( properties, "format", *format );
		}
	}
	else if ( producer )
	{
		mlt_frame test_frame = NULL;
		mlt_service_get_frame( MLT_PRODUCER_SERVICE( producer ), &test_frame, 0 );
		if ( test_frame )
		{
			mlt_properties test_properties = MLT_FRAME_PROPERTIES( test_frame );
			mlt_properties_set( test_properties, "rescale.interp", mlt_properties_get( properties, "rescale.interp" ) );
			mlt_frame_get_image( test_frame, buffer, format, width, height, writable );
			mlt_properties_set_data( properties, "test_card_frame", test_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
			mlt_properties_set_double( properties, "aspect_ratio", mlt_frame_get_aspect_ratio( test_frame ) );
// 			mlt_properties_set_data( properties, "image", *buffer, *width * *height * 2, NULL, NULL );
// 			mlt_properties_set_int( properties, "width", *width );
// 			mlt_properties_set_int( properties, "height", *height );
// 			mlt_properties_set_int( properties, "format", *format );
		}
		else
		{
			mlt_properties_set_data( properties, "test_card_producer", NULL, 0, NULL, NULL );
			mlt_frame_get_image( self, buffer, format, width, height, writable );
		}
	}
	else
	{
		register uint8_t *p;
		register uint8_t *q;
		int size = 0;

		*width = *width == 0 ? 720 : *width;
		*height = *height == 0 ? 576 : *height;
		size = *width * *height;

		mlt_properties_set_int( properties, "format", *format );
		mlt_properties_set_int( properties, "width", *width );
		mlt_properties_set_int( properties, "height", *height );
		mlt_properties_set_int( properties, "aspect_ratio", 0 );

		switch( *format )
		{
			case mlt_image_none:
				size = 0;
				*buffer = NULL;
				break;
			case mlt_image_rgb24:
				size *= 3;
				size += *width * 3;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
					memset( *buffer, 255, size );
				break;
			case mlt_image_rgb24a:
			case mlt_image_opengl:
				size *= 4;
				size += *width * 4;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
					memset( *buffer, 255, size );
				break;
			case mlt_image_yuv422:
				size *= 2;
				size += *width * 2;
				*buffer = mlt_pool_alloc( size );
				p = *buffer;
				q = p + size;
				while ( p != NULL && p != q )
				{
					*p ++ = 235;
					*p ++ = 128;
				}
				break;
			case mlt_image_yuv420p:
				size = size * 3 / 2;
				*buffer = mlt_pool_alloc( size );
				if ( *buffer )
					memset( *buffer, 255, size );
				break;
		}

		mlt_properties_set_data( properties, "image", *buffer, size, ( mlt_destructor )mlt_pool_release, NULL );
		mlt_properties_set_int( properties, "test_image", 1 );
	}

	return error;
}
Code Example #12
File: transition_affine.c  Project: elfring/mlt
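An older, single-threaded version of Example #9: it calculates the geometry with composite_calculate, fetches both images in rgb24a, and walks every output pixel, mapping it through the affine matrix and sampling the B image with the interpolator chosen from rescale.interp.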
static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
	// Get the b frame from the stack
	mlt_frame b_frame = mlt_frame_pop_frame( a_frame );

	// Get the transition object
	mlt_transition transition = mlt_frame_pop_service( a_frame );

	// Get the properties of the transition
	mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );

	// Get the properties of the a frame
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

	// Get the properties of the b frame
	mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

	// Image, format, width, height and image for the b frame
	uint8_t *b_image = NULL;
	mlt_image_format b_format = mlt_image_rgb24a;
	int b_width = mlt_properties_get_int( b_props, "meta.media.width" );
	int b_height = mlt_properties_get_int( b_props, "meta.media.height" );
	double b_ar = mlt_frame_get_aspect_ratio( b_frame );
	double b_dar = b_ar * b_width / b_height;

	// Assign the current position
	mlt_position position =  mlt_transition_get_position( transition, a_frame );

	int mirror = mlt_properties_get_position( properties, "mirror" );
	int length = mlt_transition_get_length( transition );
	if ( mlt_properties_get_int( properties, "always_active" ) )
	{
		mlt_properties props = mlt_properties_get_data( b_props, "_producer", NULL );
		mlt_position in = mlt_properties_get_int( props, "in" );
		mlt_position out = mlt_properties_get_int( props, "out" );
		length = out - in + 1;
	}

	// Obtain the normalised width and height from the a_frame
	mlt_profile profile = mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) );
	int normalised_width = profile->width;
	int normalised_height = profile->height;

	double consumer_ar = mlt_profile_sar( profile );

	// Structures for geometry
	struct mlt_geometry_item_s result;

	if ( mirror && position > length / 2 )
		position = abs( position - length );

	// Fetch the a frame image
	*format = mlt_image_rgb24a;
	mlt_frame_get_image( a_frame, image, format, width, height, 1 );

	// Calculate the region now
	mlt_service_lock( MLT_TRANSITION_SERVICE( transition ) );
	composite_calculate( transition, &result, normalised_width, normalised_height, ( float )position );
	mlt_service_unlock( MLT_TRANSITION_SERVICE( transition ) );

	float geometry_w = result.w;
	float geometry_h = result.h;

	if ( !mlt_properties_get_int( properties, "fill" ) )
	{
		double geometry_dar = result.w * consumer_ar / result.h;

		if ( b_dar > geometry_dar )
		{
			result.w = MIN( result.w, b_width * b_ar / consumer_ar );
			result.h = result.w * consumer_ar / b_dar;
		}
		else
		{
			result.h = MIN( result.h, b_height );
			result.w = result.h * b_dar / consumer_ar;
		}
	}

	// Fetch the b frame image
	result.w = ( result.w * *width / normalised_width );
	result.h = ( result.h * *height / normalised_height );
	result.x = ( result.x * *width / normalised_width );
	result.y = ( result.y * *height / normalised_height );

	// Request full resolution of b frame image.
	mlt_properties_set_int( b_props, "rescale_width", b_width );
	mlt_properties_set_int( b_props, "rescale_height", b_height );

	// Suppress padding and aspect normalization.
	char *interps = mlt_properties_get( a_props, "rescale.interp" );
	if ( interps )
		interps = strdup( interps );
	mlt_properties_set( b_props, "rescale.interp", "none" );

	// This is not a field-aware transform.
	mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );

	mlt_frame_get_image( b_frame, &b_image, &b_format, &b_width, &b_height, 0 );

	// Check that both images are of the correct format and process
	if ( *format == mlt_image_rgb24a && b_format == mlt_image_rgb24a )
	{
		float x, y;
		float dx, dy;
		float dz;
		float sw, sh;
		uint8_t *p = *image;

		// Get values from the transition
		float scale_x = mlt_properties_get_double( properties, "scale_x" );
		float scale_y = mlt_properties_get_double( properties, "scale_y" );
		int scale = mlt_properties_get_int( properties, "scale" );
		int b_alpha = mlt_properties_get_int( properties, "b_alpha" );
		float geom_scale_x = (float) b_width / result.w;
		float geom_scale_y = (float) b_height / result.h;
		float cx = result.x + result.w / 2.0;
		float cy = result.y + result.h / 2.0;
		float lower_x = - cx;
		float lower_y = - cy;
		float x_offset = (float) b_width / 2.0;
		float y_offset = (float) b_height / 2.0;
		affine_t affine;
		interpp interp = interpBL_b32;
		int i, j; // loop counters

		// Recalculate vars if alignment supplied.
		if ( mlt_properties_get( properties, "halign" ) || mlt_properties_get( properties, "valign" ) )
		{
			float halign = alignment_parse( mlt_properties_get( properties, "halign" ) );
			float valign = alignment_parse( mlt_properties_get( properties, "valign" ) );
			x_offset = halign * b_width / 2.0f;
			y_offset = valign * b_height / 2.0f;
			cx = result.x + geometry_w * halign / 2.0f;
			cy = result.y + geometry_h * valign / 2.0f;
			lower_x = -cx;
			lower_y = -cy;
		}

		affine_init( affine.matrix );

		// Compute the affine transform
		get_affine( &affine, transition, ( float )position );
		dz = MapZ( affine.matrix, 0, 0 );
		if ( ( int )abs( dz * 1000 ) < 25 )
		{
			if ( interps )
				free( interps );
			return 0;
		}

		// Factor scaling into the transformation based on output resolution.
		if ( mlt_properties_get_int( properties, "distort" ) )
		{
			scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
			scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
		}
		else
		{
			// Determine scale with respect to aspect ratio.
			double consumer_dar = consumer_ar * normalised_width / normalised_height;
			
			if ( b_dar > consumer_dar )
			{
				scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
				scale_y = geom_scale_x * ( scale_y == 0 ? 1 : scale_y );
				scale_y *= b_ar / consumer_ar;
			}
			else
			{
				scale_x = geom_scale_y * ( scale_x == 0 ? 1 : scale_x );
				scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
				scale_x *= consumer_ar / b_ar;
			}
		}
		if ( scale )
		{
			affine_max_output( affine.matrix, &sw, &sh, dz, *width, *height );
			affine_scale( affine.matrix, sw * MIN( geom_scale_x, geom_scale_y ), sh * MIN( geom_scale_x, geom_scale_y ) );
		}
		else if ( scale_x != 0 && scale_y != 0 )
		{
			affine_scale( affine.matrix, scale_x, scale_y );
		}

		// Set the interpolation function
		if ( interps == NULL || strcmp( interps, "nearest" ) == 0 || strcmp( interps, "neighbor" ) == 0 )
			interp = interpNN_b32;
		else if ( strcmp( interps, "tiles" ) == 0 || strcmp( interps, "fast_bilinear" ) == 0 )
			interp = interpNN_b32;
		else if ( strcmp( interps, "bilinear" ) == 0 )
			interp = interpBL_b32;
		else if ( strcmp( interps, "bicubic" ) == 0 )
			interp = interpBC_b32;
		 // TODO: lanczos 8x8
		else if ( strcmp( interps, "hyper" ) == 0 || strcmp( interps, "sinc" ) == 0 || strcmp( interps, "lanczos" ) == 0 )
			interp = interpBC_b32;
		else if ( strcmp( interps, "spline" ) == 0 ) // TODO: spline 4x4 or 6x6
			interp = interpBC_b32;

		// Do the transform with interpolation
		for ( i = 0, y = lower_y; i < *height; i++, y++ )
		{
			for ( j = 0, x = lower_x; j < *width; j++, x++ )
			{
				dx = MapX( affine.matrix, x, y ) / dz + x_offset;
				dy = MapY( affine.matrix, x, y ) / dz + y_offset;
				if ( dx >= 0 && dx < (b_width - 1) && dy >=0 && dy < (b_height - 1) )
					interp( b_image, b_width, b_height, dx, dy, result.mix/100.0, p, b_alpha );
				p += 4;
			}
		}
	}
	if ( interps )
		free( interps );

	return 0;
}